[Binary tar archive contents omitted. The archive packages the Zuul CI output directory var/home/core/zuul-output/ and its logs/ subdirectory, including logs/kubelet.log.gz; the gzip-compressed payload is not human-readable and is not reproduced here.]
T_L-$Yp`Խ: MK\`^CJ-pi~Y(p_g׋}V4gw+ŧ4&ӶJ˓'ÉǗ'\,X"4Eh,BcX"4Eh,BcX"4Eh,BcX"4Eh,BcX"4Eh,BcX"4?{Ǎ_~0ܮ׈]`GQxԣC}~4/I=n&]$UztAh]ڷv׵̮y֕{=,;7wp k^N# KC3r!R}BL3-]zߘp3c~' lXgͫazszFM0AIi!?$%/} 6(¢pnۡRpތt0w@h)|a+te7\T( |x!:XqZVtYuCeݦ` Э O2 q:h++h%d):x~1B;A 9T38)45 JϗѺF "Dh\T<2ß|~/Y:-K<ق_Y x=L/ovq8 I?xʏQ5]Ys%'6aܴ{k7: O7lʈn|w͠h.r_D"nmJ)/]^$4Ʉ"$ Z|y;hJRQw_z J½??oDR1KJhR^TzƤ~_l'#F7g/Yk4wC!f[v=U1s/I5YK|k0^;Xh}xvwTBBggw \ !\*(S3P%N.N\[ At2z}O;dz|,~;AMsR/D.p2V[lPzW-b{Z֧& u9oSۑVmmHFcHyF5淫eGXILDeY'8 \yPA9璒,t 'שlOm藵\Tp<"e'T s^ֿ,wY_\))FMC:,Qxx̦QJdxv!ZbdzH]Ǚ=ZÉ6pjL>\4ՔyuKw >RnPu/Ge *~'nYVz*~Ϊ8^0!ǂ-R9QrX֖">QxR}Ra?*"9vUPE{vV\ \ %LF0l.䞍9B0,LN-¹- b1r()v}VfnfVJP|RWCiMÏGKqF (?>]w5o7|8.qP^|"_ڰEg 57QS΀=hyn lPj 0ߏgǰ֜ɯW4芎AA E-§?Na0D WtHvcD-BD ι/."'9w)&32D5f~O==G랾r{ҽ"|x*ް; lIL`똵b<\3#;no/Cm2xU\@ٛ`|ܸq/BeYR ޡFSfRQzުglW[m ==#n̢>^\vŰcua()1щJB@*/!%Ж*c(*QEBm ݄ 9f1BFGu O<Эv{`V9}6M!k}L*_oٴ)+ 0s=rXEt1yB,!lm Jͥ"x(0Cc@a'`V9cuG[䌭3'f<x  MY2>]RXw?d0޼u ÛAsl\!!9f4uY'LRe  lR˱I{nT\d{8l+Ş4Q'툱 (`\ u9vk~X%XvkP-Zڢڝ5% ]+ࡨ@'О2lt\G 26E'c(٠nxzJ"+P rT@b('#&dDε[g7?YKf[ÏrD2GG86נc>+a3KBE8f}.Fi$$ $ii %Diq.EmRKpU0܄4;MEn-so.#_ q5,y(_T-EŎ/n5{R"SIg t'oSba(%9ˢB// ?l ;uP?KNY{U8k#vt(*'=:g~r>~N*+*$Ʈ~q?jz{ "Tsnz=x'WBdR O\IEb I,0'2duP8"vF_M<*7>ffEo8J;1D)Q?o?Hƅ $&e<,IF[Ore RqXehzt2eqD8C@ȤAr$@  \IIt&_:.1@URr?!eo{ػ]ԽYw :]լĞ9?33dznLJmoֺݨ>8Wd y6]֢g[2sԜMur>j'Ɲ,>Ϥa<#ʦ}Z\>FDHRp0DB7}ݒ ɵGϬ}&^DPbK,gH )g$ID-j&"qbP1S!`QkΤ3%sJT unyhxɗ^oV %Q𹪞|rۘ},Ne B`;DD=M)DbIt-38Nv 6W̦R6@ZCAKJN)vſu|7ϥ(McEz}-Zf%G3Jw3|/4%-s/^㓿i¥㣿pqvp_?)V#x>GA+8,AI{"]T)«=yaEg\{>V^|S  Ul[GMUjs:p^|?KE< ohb^gUFiySE>0ۨYkBw&Q9~OHֵz ^޲X.nOzOhjV$/f5=xF13#%HfE\ka-xЮI£WnKǜP3qB8: [̓CW=Ɨ[1fcb"E.L2AQ uojZ p-bCbpP:&9Eg1 j2C ^xbs|Wڮ_JΧcZUtTY& qou!z Zjk( $-\8A8B;1g` d`U<"Aj>0\U'fN vvV9sٳ*o%pc<(kc{fX󁧹bX_˜Umf2+8ȿPؼO?GyOY{m8ſzyyÎ\ ه ~i_|a w#7Kg9OY;9 { |8W8UϽ -xOP, SQच^Oؿxs\GA 9}BQ!Ep:?i!~fuJ)T ཷ32yqTj:O|,#"Ar{,/\(|U?/^$J*R.iIe%!:"`ڿƕ*/~L?myOc;yp8+"gJ lTz<0+L+^ʫA:90kf݁!G1ܾ#~҅ʚo<0yEUQ \6S'a2yn_/jY|ֲSoe vtќ]9'fl³I5nK }V'7:#b)-z}n:͙s*\ETOp}Ұ,"eմVC:30]U)]tk]ݫ@ oZ\>܏'uS(#mk8QM`mɵD:1y" vt@bHGm}ΙZfVnNAmiZ ^Z}npˇ6/.!riFZe j͛/ɻyicF1NܺB)2bh^4D.j[: S}]!2YR=2B^΀<锄Ѥ7BF4Vp[k`?Rg-ךROmZ X>  F Y]́lp4AJ"?.JeYyb7"g5^GztEo^}tqёQ+JkgG;$NE'$]fⲴRr 92hL޿4)mPRZSjՋ$HQi 04qH,\>3[)ӓdJlrŜƆL*2@I 1jM/&  Zڛq̍} e$}Op&ë_E7Wv/]&I%e! Nb U`dtD9Ge֡L O*$S:V]!y.4JQ#r\7AwxpV[8Q!nK)f] Ά{6)H)͔3 RDPQ`)֠lq*rVV4&~ כsYH$o^뜧:YO9wht—˭Va1$U5 zU F9tw:32ucTu2Âq)$?c*/GQ>$rztA:6>o.v:GbW\w )h$oGM'>[w5%Ӛ`+u$  NvH!OC*zT<CF[nC@GG-:8c.QJL yY.eY[ύEpyg$2A3⩣FksNLIExSmpSwPvHni#}IƗ8-#m>vHG( P; Y5!)A*cTG[-z%QXc1kx iA= &% Fd <-Ms%sLh8E2 Y4Bu,eDMր>(Bϣ7Y}Q#QL>~G*S|џL_fl=\5LHyu.q$ T:hb6?ϔ 癍;kӈgF̨*9K(LIMp.PsA9(V7)&)l5MM Ux㩎F&! 
"8O` yggVq]nт-=Rz˗ᅥwׅẮSSU7QZ#gjoֺ 1_=_5_W>σy>[2iE_UyE1/gRw~IGǫC#~Xȃa5Z8W'߽fEDJT+NQ/*d2}p0.e=V[p+fvқÓo$_* y{D袭 ^{HC&;afQ&]S׽6̭sf/U~1;3˫`Z\-Fe|j<̌oxwŬ"լOIݤgJcex5:E<.tNy!ZfZc2\nb2Zn2J;s ͕4i9J=*esrtsQ34W(47WjpjBЦ 34W84\eF\!\AZhYUFIgbT?…|[ٚr9/=+^.m-T 'O[D=‹&_槗ᬗsp4/qr%e%'=PNݻ?|L_M;nz׸Wp2gƿƒYۋp2uN.%~`B0xaySN(>U|ⳉ{:xyB;AsmJUfϹoݫ'IP&sJFƆTz6o=GLGfr&PNǣ˴*:=Įk5A +NBikͬ"ȋ}WQJ:Czwk)B*S: J`s*껳,7_yJ+ G䶃|On;HYۡԢY“'S"+lTp\eR4\eU;sl#Zs"sKs֘Ц s4Wj~g ۟{sᬻo%ο6ƋL^.ʣW^٩+W0e?κ\SrK ז1NT(39T1RuV,fpn*Ưe2UIVh"si| =!VCdw+AM2 \e f4\ecW\I)-2W0'$ְV7~Q2B:s ͕bRj#>zVp%mh`F)3W\iN(mS:[ekeb2Zxg0n9+C+6\]p˫W',ʼnɋv[?^]T_gqO`D 'o*%;t&|{ R~оCB&tN&1^zۅEJ+eJ il3Uy2DWXYS 5*^uoNXS]s5-I~GW0]8zl޸ ˈ$Hwՠsu?W5v֗hi?)lwo$-@?0l7=U=0y bT;\Wp`2Qf$u mm"femL+²i%훛dQKiʗxaqN-ZFgقdpq^|O[~p!ꪭMmRN$.}b"|z4CjLdĤ eQ娀I F+BE\qT.ue˅YL p|vRh^%e|)4peEHZD.j[: Sy4x4w$3jüOzgiK!L cBɀ8'0;Fȕ*nkkM'6DB-ME Θ ښ}3E8|jcf+=[&F+ǎ}TYj ԑvȕiɽ%$ȃ-58ʹa» v/O A%o(=˖&j ŞِgAxB]"4F/WuY"N0J /ʋ>2բ$n q~\̱ hoM :Ý J*611QxcND%4 $S6t !M0t #:0BMsZ*PpEF\6)I@ cwmIa~`v\]`CꮖSBJ DRm̰xUǰgKh{NGzY|+CnFH>l64wk+7ȟ6|nOxȸ$+ x3bY ( Ifl!yk]x8$K<' d;25 GQxH%j3( mj+PT;f[Y /pŀ}hfϿQ{Z&R.;.E8=#0^dF0$ܻxqXzާ'|?9`쐷pv^INφ 4РC=&#&D3>_1kKM'8;?3:<#E<Ѽ7t얠nvJ7)#2>n?b</8qϗAl]\ 0=؆5ΠnE#^χVcӫi8S_ -25}|z5ghMr:8;;~IlFlAH/6&҇9]'Isonދ{6~.Yx,F)[FV݊?[,E&Nc4FrN- z3xv1-hè>){Ǔxs5fҀhn5;'촺;1AoIfŝJ.Rˏn"?~<ʔ!Iu=Mp${{\BqbM+vr8+JOEZ\\;YMXE8FEx_:]6mh5{ҥM|qE"pܤtM-ڬ kÌwbfs5qn՚- &֝K/+jG՜We!@c􉉌2&k)$ 2%!bQr }Qٞe//ҫ _=+p<. )g(UD,YE9 P*eRs2bwzU^v_gw5ݸ =KgbfeޣQWą{Nlewͤ [gYOwO?iJ=q*J=kˆI=ꠢ7p"30h hX`"=ԟB(PF?5~2Bp3h]!%J9x\2P#24OU!Ԛ )uL,k gMV(`'b k,6&fHw SL:i/"ܽ_62<`}jB@CoѠOƑnz'fA?ש:yOAy_]%y(?[]򖷚17]^o߾6b0WU. !͝[(z*G}x 85ZNvcpp'0HgIIq2^iLFdDq dX%Fe(r;z]q/=^{ǽTldQ3!jƽD%' <(\hǬ!JxT!Uc6:s>qlN9UGCo2!u*$*( 2g k0+jػQL+Ť?f1 YShx ںZy^xXA0J,(Ę R/"- Y)("Җ4PR)&%&rBti;9eHbK>ar5q6 l2ƫdDcFEa'M4vT[^nN^QՐo9^4]ZdTYzf+,cG)ɘb 66`YVV(Z м,|T\p FPb<'ϒl+y`ф!Ho)c)NXmNr0CҥPBKΨ]as&K!I7_P:,]hB]t.5iVu!rneot&愳9u6 h|]̮ Yocccީ)Y!i‹dhe?#ocʄ2:XI}=}կ)؆m]hXG׿x<4 caH'6ix޼O^X `5RJhĆހSJ>Sv/M^ں꽊t[הBϵ"5&C9#Sg椦e# t]2/iFHh MzAݩkczv<wsc0]v۱O`3zWtby֐~Km4VftaD;O l{LHːS;CZd(FoynrT79tFt ro@o`B]+g<瀵R(uLA[c3EL%MZ*$-wVc:Xc`Vwe9 Jl1AonxUhz,"Z ̚fS+6(q¢I:#TRwɃ~^lh I$ -c!f X\GK& (aɅy 6`NZ]za1{lsR6s:|/I' 80w6QRW>[vG.U攵=OϘ)ze۽Mk+ak7f DUX32++ƹ%z0}6erVb2dszX3V ajf|z_/ 2(*1?.nN']h 0M.GçgxbcU2V8Q+rD td . Z`Z[:uC BL>UQܦ46*Y1P `则Ql\8=6Li%]M;Uez#uςdF朜"ZQL 0M>RPm}NA PW4kcC*br\QHIw Y4pR4Gс)D9!T۠~YӮ8}QWG\ aL g.he6@aDPflh4K$D sϢǬrfŘB`Jk-5ɅNU\OÎ3x%Ç2 (HV*aRz)o-l$/ j~w22. 
Y/4ÍO?, \SfiOS؆NxRXXXRCo?%1^-")5S3!@Y MBo}}ѽD7mW ^3)K[ri%Ze( 'zZAz3;NwNjW_|;\9evi]E Y~fґ$3LllUfL7}ꒆ\(* Z#PS=oktGeB=e˥ N+fb@AGJ<͓6֏r*5 <9 ƇV>-}hBE5W0nכ9xu{kX=o⇛C$|9 D @iNiQV,ߧs\%Ǎڲn~\eKyi#"Hl QapTԠA);`qRRM/*e{g!g1{]De$b$QL1xYcH8o% ;bji&GOӯ |'rmlw԰:o'4 ~,9&Gמ{/64\ sfQ21*(rtP9wAT(s8U>b  B@d2@i;U_V'X zʱtY()V)3vD b &T xH![#疺PGW{ ;s57]v6ml7!Μ/-UUz۵TbR@@EQX: C^Ox"BY@W̠\p 5*k _4wrhQ9",2RP7[#gY߀I\gl}S5VusV}zEY>f {:)hs fAP\V3  (0RG*djRDPd8qm?^^?MCBe.uvRLW3-a gLhίU]ͅ3V,~vCOSi"U.f4\G!'Ӷ@!8vsr194fU>R$;e7#/TeC8ё_TgpOG5?KZK@Klaϳ=*ொJiIӄ,-eaj PWOd o-#@5Ul(?ԠaB1^ioU:wƽlA gÂ.6[ TK"*#gp98mê ؚ<hd lUZ5Ig+hdQ&  2,tqmI""kh@Qեh;UƷ`l=t.6IYg2얪]UeNFC1GUnN~TC ~]L'r(Ct^\= g1RiSf˽ھX.pL V1@H ("ݮ',Rg2.=Ay))qJOb΂wDðqeAkl8W ."%/%^n,4egc ߝUs4HG0R$%}8Nx[R#uoJvMn"[n1hI^VDvJ)kڴw󸂖Iɑ6(2d"[G) }pP<hƿN# ED0 y񗔺x: #/?~ ?./zM@.J@Yj%ZXMiTAR^S𠌜 vcS* .TV2?An(UZ(ܤTݼ_Qu$*o~)MmVwOn>Q14Jq-m:Se/ף ̭pcv5R"hF3{4svۓpu1|[0tH# \Kr1bMЁD?Coߡη+x1+yhmtrrȲajh~ ޷q+4 C˯HLGr%_z5D%Q 5@l b7zU7y7w9ש~Ëb٩-)BTK/?83vdwQpS%Y΢=ZF0U|hנ%GBFb(ӌKmN8iHX8J@ Vȷ9T3K%h&(A + $GOlAê/aQ=;7`x$#yhφD?͎5M|pN40pLVp+f1̊S2#|=`UJC*S^]~]U4v"?`!8~%s958Q8n=-#緌2{tH- %tXԁ Q ,mZe"TaU:hX)Q r ) %m=3{ ;0AR-r;>939|܅\ 6zO]F[|[ξϾ ?{~TEYE1iP!.|%%M7{,ռT:rit#{/ }MJ?!2:vY\D_I:#KQ=/¨C~O!kw?+Vh13>9PF-hR't> L|Zl٠-9-rR ln%9/4$!7 <]x; `)D˹VA1L$ZK]-Z>De(瘀fmPJ%2,"1rbrX+H!mop*O~im6v<g"݆lcG }vwV;DЇ:-x]/)%姳62JDsLϙc0^*EH#A.a|OC(F\skRp:2bG%sQK@BS"|סVP JlCqw twȘl=w[|"+f諹fkf]̶LJtL5Sq|$0s nlX"4wԋ!rɵ7Zʘ nG\:w2f~xјsd"DlH F$_eB#(B{yy>9ðofsњiY>NlDd%QcDFXn,(9@8Ȥ1HO6_P;H("B$:& Z 3âҌ;d 8 (O ¶*S(Y]UZ=\{ra %*N##`: \o`r)BF네aCbbiI|3vh)A10*  )S"44ONu@7Q6Z)E9ju,n(IJR% @Sfj'e!X h0'X 7J8_nAe?^0UfY_I~M*C8>|~ʇ)]ٛq|?)pxn|J0Rs:i4g!*d$]ыU ѽ5 JRҷM!*+^>+ұ@iӢڭYO'ik"v<7jk ??s-h|D"s3G cL\/SQJ`IIm4]o9Wtwض~f ku2f!M#/c8O9s6WqZ}U+m"WYUYRK> ;zHzW[ɭ䶶iv++v}m=G򴔱|wrQa'~{YqVհ+# ixN x]2EaU.S *1<u4;\en)Z."JDZ)CU BDNJ>3Ze|3Y3nЋ{ud'>ϣ|D|Z~(lخvHҮE/7*+fS`R}U vBRUxShcenk1P{y|\&0\ IznQ7 фY) =/zct1*X04ȅyoDB:Qt4KYJ{ gCJ}H}̿d͇3t2<8"-ؔāHVR.JeFmCO/*lі,MΪ,D *DǀBD3.1|kM VbWx:So[ǕGZg}92Ey%d /5q [MǷ:},i_7][S|jןJ^ڢVy7k.1,!lm%uTj.٪! p0ZĢQ'y[Xz)1(AGz1(mAbJ9bY-,L,c;{^dn6z:oT~_Kz}AiQ $ f0IK !rc˵q%HZlnx8 q&WJl 161a#:X b"K'-%z퇃p b.6;ڢn=ؕ@3vS F@LSԞ2lp\ Ȕ6'c +Td Q#[YC? :1GC}&cv=,&z{X\n 1CǶQE\5uT1-OԈYb*]pIiTT TfvZC QZ 6RpjҔ-XvFj "i&4w 8-⧳ڣ]T3:bd[ E.<5{#gYB:Έۤ!^,x-o.bc[{ C=܁ L\w_Mz(r5 &An@kُ~,<;߭>~(AIg2DJD+R0%A>$n3wf67DPbKGaS$ QFTEQDKf_Ѕjr@FmК33%-T{*E Yr:(]8]G3)7x>d3ghqP{5ƹF/j|ok}ܨ їD.ND<&%^[Zzg49uU";ޞIUKhכ j /x qw-y|#wp 3٦o\<~uZ"1 +'ʯ֕Os8 4V#E9 &MHT0}Ejt.$#IC{ Ժ~:DfD%ƇrT*p/D[xK8FΏj4~Lp>{gBܾfYPBwBz)MknGSvG2U{% y9*6A"JXcxpT`I`hQ KeB 權Re.rLq6yy3 ֌IPܡR% iYwY@4-+Esye2 ݍ)Rr\"GBQTl>J G4!IIކ,!pHQZx1Exɲd :@cZR!=Qh@3xW7mε9מt #wˣl]l޼rWBm2.Uܸ){39B1?}?֫Ltzf5Ѭ;ĖnUg+я zr|oy7uy9QuvA]kwԜ_٦P-'v ۟QjbG3+{q3@L @)p3~I^?_' &/_w&?_$\Gi/Xs_Q߿H/aR}wJ;'=LJa QJ# \nb/ (~r7j"?#=]m]:kmExAy[xo]mf꾺[_̦NywvM9_4x0io|Gfmoprl:8;_KQsDqgE-][0H`N+f~\ۚHfE'@+U" ~tr.CN:C;:v~*ؕ= fj?xﺬ{2kQg B}"4FPD7,0J -AyzQ:$xs tn}>m^ΦO%{]ǬLX˽ *+111QQKm B…""<ƭah #OL`\hU?wB2NXC"ZMkxxzZӳYC;Hμ`I `R5P`FI 9Q+C5ЙsrSPTFfAz-poSkoƷ*[9-fܸ OB)([9'LEᆽO"Q&7tN 9ug$_$0tǝn[ܐNza?Q~Cp4 BF:/ ?WvYͨ3ײ5odwwr $ ]s3z NިK=bCݮ b'732 ]im!:QI/?C7ߦ0·'E:IoF>F RrĨ>GtݷOd;>_x1mn㗆pױWX.:'yPD2~TFEY¨w: a `w;]s7gВ?_r_[|g`}{5>K=sO>v`u;΃r'.F`i]g,t,J-1%i&nE</y1,F_I \c0A:\I ni)'hw+AɊ |o.LjRIgMylz5/6dS6!5RM~GGml}▛a{UP] zɎeʷb`&XN]ͽغp|5ܱH Kh$ $ ۄ7N0[&4plaŽN٭k? Qbv7z==^l =RCtr42 !X!%xꭌ6@T ѿ\ !j2@NoYDe6'"ϏET(0(.zɯnL}{?~ik am+VJ J էuj! 
*nDRƤ*&qm&QD5ׇQETo5b;lJ 5YMP{I $I ,*ަ>TIOK qxI>?۵TKtbs) VH͒O h/KsYBܸ|dȢ6Jm `KQbI6܋7ymSW>v3NvUbil p>+T)3{+`Wj\) QtZ#E:}BUupPU= ݪԹ2dB'?.AFEq &yCs٣|-¸7ܙuSA̞eQly NED?]oGWkc=T=' ؽkg )I,k߯z(>FX5ꮞ%a𨢚+&9NxQ =^$W#נSMma(aB.DDe9&I[JK߁"kY"M:֏ Ʌsc̱|>ڀ5sPƎCF(6n4[Uh;j!Z[L1ŠrDy2Cu+/n*D*to^{NcR "Fkw+ Mle7iòS6GlqQFͦlewK_!o~w rz荼ug+vZ{ƾ'יa.=sgnރfjC iwinZmx-NÁ},mKS|H9E*sD.MtmXݥ[ҎWwG83i#̯H"Zr#*7A%7۽G]Ųfc'TRo.8fp"ǁ lC:g/+Ӥ8C\ n|gӌ5C˜#٥-5 0Ƹ3+IZzIJF܀g v5m%Ǖd*8:>?v<g Ir2s!z2_eIyVY7G08wxauZZ(qK&ɗgPɗ:Ɔ2 B(ՊLyp8j4ڄ\3C K<7̫ⴺKkm-6.,5-4uYǨ/+)Rc~<>)Zeo0ņ:X_ɰnje#{h.QXXvi]XDr .q%#$%HN5z5@\Kp aO W>񦔇I˞häGWpzw i;pEc$.S]$WIcTW_r!ɓ~aJJjypEWЄv]%ewUWHKU!\1;WIC*EJJJ*vpԴgW\RIq* ,î3*IHP?wJPksWg[lj7TMPtmsl:CEp,&T- L PIsXw% UKJUG̘=U{TMJp;eWt&L*zpԤgWJE4D4 \( \%i/3&)UJjǴٹd\CLJҍ{lġ7t Z0=f]v5_!Bu6І$.V$-#N_vhn8z4GUacݷՃ%s&-y"z&eJpcM+X"J2JҊ$e-2т+!J2J }pTgWT+ +ll tB;\%)Voۛdrjmxy~C\^t|Z ӻI}^"ZRZK+Zdxt{FeNCoygŕ/Hh *.K-`O?NA/N+k,z_<̲/tmҡ7K1hF鑢M7okz_|,ZA*DK}WKw*eEv5.bgT5ĝ2vᄑtj>P봫ҜۢPWz27i-fr>Ne Pz.evag[cR;ĬW逰 9HysQ bU{ a~0 n ~vᒿ AA}x7_hvp]2KL hC6$]_q6R 5$E}:@AxW1C_Ɠq}wGcSSjWԋ¡/J*PܢzWkT6moɅvbGG]? ?EUg3=ح1;]QQb[~?n+?GȩW~qWsgv & Ow4ۇ*}vG ~ <1،JfM; l%ƙ(`/&߲BZsO@7JTO,8~頠rvvz i)߉MN1Y4:xSO):˾pꗲ "]^-Pn< ZXb# {%@ÿ]֢"( PcԶS{ȷ~ڒH!Ѻ0d/"G7 3 P&rǑ[s; &mR &D=nh8Y`B<UX-zg՘IDk1Dk45[!-'g%:clV^@ TgWm]^Po3#XB5^@/ 5_=| ̂HB9Ua[)r-jk6p¨| Qtv86ӛM.oi'M}=3W橼Otٻm=lSNo.'MPue6Of%sMqLfK^_ؚ;rT~S֖yw5B\sI@X8SV"}:S\-ɫ A1g@k[g72UZ4c_,-c!XXI6JtHb"bg7376w~@nFh8㈍q4E! N4#0[Tie4ĹhA7`xh<IItY$R:BDحdk!池vkڱ/jQ3m %`b9 -&Tho! 2.u+C@[ȅ4*Y  bEH {LJiLxؚ8't:! bk/"#6AB$6B4D#e8Qi0hh)pvg;ӗ b: Ą3[,A94aK-#bkFCmg¸i5-E˸(z\qqrJ0\z'-F;Ƹq8JVG^`IFo%cCakڱ/ʖP*+l}Y<|*|N,_^,и́YѿM޲F^KnD^hŎ̙2n'k?Q61wI{z3h29Zʘ ~s!Sё%ASҶR3NmH+ s1 8 .@P@tl 5" Aso[HXG"\JlBTC2 ǨUQFYJy>%x9kۦ0KB#HD.MO ].z~6avy{7\Eyzj7!aSS$ԌXF'"G8:FdKˢxvXd$'ߵH *:\*ɴ"\4:& j 3âҌ;$,"0 (O!iG!CP(P0^E[|Nd:zccʐ*N##`: Zo`pEJ2Z'$ SI-?SrzmZ!ycK)A14* [)`97v iElR 0s\ܠM LRJ&聦̂NpB`O,ΛO-nH<ͳ~>?'۴Aq9}Q㯗wfa| o&Kko[Ԩzݟ1] c3KM4D&|Mo,c6ͮuc rg ~ 5.īQSg=ONj iҤV[VPT; _#>j_3%c8ˇ!*Ѣ4II$nrwxkuN].|ʤo@ƒ]UnƗn)5K3E-@:`Z Q͉~wt}m$벵01D|LM^ռ,t&w: ; %;NNg<ɲarʞڅʺLFM2(tMZ]0cXE:>!8 cUdſ30ԥ r(o *=HCjtaQ*xN}?j*f{($꺉oTWTNl|=8STٸ7:u[ 7IOTcU KZmz3`AV|9W/{E/X>ۏ0U"ԲUeآ˾l _ڭ"EĆQ⨰A8akT?`qRRM}-*1{Cb1H6""H$Hd>\9g; "j⼕4Q }99j}vztDh5ѽF'%5"z#"g͇Px:!t?0_ȥ@ 0cs%ΙEɬHUPיԾXjb#F#Q(0 q|9 @Xt.Q:dVaBuk'X :Դ#vcl^KqW8:{k'<앂7x,j^2gb"Aspw&#[96|Drl1~@4S-P̑+hdQ @[ VqmI"D!d ^jp7k`Gx|,zVu&\ownΥ|&ZF}w|`%֚SWjt b_[Z}!9.۟JUCw Nlp]YV%s(9 ?}Aͳ›+3 mQ\!Mޮ?ԙy9U҅YW⚻NbnkL`SsImMmW e2c{?KDt^OᗏM 3*gRbr[м}R&9?8}r˚:h{!ƚi˙S,E8d E$EٚĒE*L}A KI S"x ,s$# LOA'xawnzR$[hEzqۈ\}ᇻӤJxA;[$#TtYmZ%a@\WbP$R+2d@Zeۘ|Χ5_o[~LF xA]sQ@whn*o sc̱|<b &@ք8DCX[#<5*CzP:lp6HF7g)Oc.АXeWbF YpT.s_.C6K1;pϗ ݆O gwP.w}(}hNlɾprWckqUN9Ie]ag.M0"^1W45<,#z 77O'r5P6n-zH%>7TyJG'\% \.ЧIZӔ!} &:yO7o"tEr)\$Bdzr o&mfNӤdhFE)ҷ@u.ęW`Soj`JH}]ĂO -[uElVfA/z/ߓ?wz3m1w_==+䃞)BBI>}M?\`TI(`{W*!:Gcr[=٧Ry]HxyD!QاR,DT^̢trDrG OpcF@zZƯܳWһ!{ϼaWXr$d$Rι 4DNZa^Z 8JHP\nn/:Ti1vkgݶ)nrH_8$s,F#Y%u@H 4z67gg{2Nuxl/L 3嬧C{6̆:;e!(ͱEGτ88#ljfM"tpN49L&WbJ̊ )P;@y'b޻$<}Q}<.v{LO;mD8' vuHDV2Pa#J_k axUnJDŽw1>Y]o18Oʭ8sr{V\Ëᬸ$aZS+B3b$rD Jy˃R+` D Ɍo5^4B/yZ*)'i,( VrH` L1bDZ I`Lp n;1,X3g#i!w[b_V(9jR ^q,aE4!!}?6ZkFB uTiFKߪ<_QK k80oSF$z$F[OWN|L5#!FB*_tnq>iqtЧü< txVťa7 )E!q!UwPVR q{c7#X.tWfv btJ<\uO#2봹?݌M2Js7QWt{Cew̕Ud77]zN[cuάAl Dw{ۛa9cuZGP+^vʟo 3-X0Ī澞3~.'~ ./=Nnn۾6f=[v4'K,,dd}7_t+]-~EO{v,eyn4r& ('S>G1Nu+m')Z_evd"\` xUTդd~wc0e7NW."Z?m{Ќ.lj~eQu*}~StəcF.,BMGv7߻ͣt{nd Z,u=T'Ĝdq\A[" 2([uicUӤN6v|25礊Gϝf^ӎ4gӜP3Y@׌sg+0 (Ge]Z@F8aC4B#/2 .(bI9uZUz(4]C'jߍ͍oA1(u};Q7 ^WtkBfΓZg0deNI=M% s+e>^rÜ9Q)DA6ƒEiCև)eC (cur{Ќ)p4ZZ]@Aa-i"!QHG9DŽRn&x9) Ȣ`;X+\$JKƦ!]clt1qZZ٤ />S ` } (EP tRuPG%b(4Y;F [4j+\ =p1[,{ЈkΣqaM`pĎJsQK@BS"|E@J &T:aD[_4Ml0HG 1 bjySK 
C|+L]O LQ,[Π$Yu_8S9<;ؾp8 E+& +yDK[5~T72h FY0|+wa}gqoR0#4'(xAc81q QcB;E_&o=&B[OB/PU\q-TmnȢ`fubZpR|;:yO7o"tEr)\&D{MV[֣ax퓤syg"$Ӥ$sqE)Hld6&XKi8S<t*|xM Af] S*AxlE겕)gnf戀/} {WƑ@x&XA8x'FЧ55TxVW=9D(k&H3==_wU}]]ݳj0T>O[U>/NFEFP켼 bV^UV/}YNJ.}bwx6ft |:al˽w78d ]%TZNW e{]FjWL2~pLqQ(HZ"lCsP!tWs)t%W2:?}J #sR@|2r4yr.@j9ϧxv) N@?{Myy11@kzGM" *-4e̳sNLhjr+U˫3tny$XLjAKʴe)Cʻ3RK @rFϵ,wJKȺNwyvV0#)6gF\tz%2݈ayHnG'vyHnGq"QRکH#!1V+L;0UKt_*tPvN!D#J{]B}UBڗvCW+ Jp ]% uJtd0gUB+;]%J a#J ]\Q_*tP'IW JD kܟf*tPJyHW( Vμnrv6)@ }tPnW?R*ke.+ܬޜv}K1m~=fՆ}TcLܑu'-QnA*Θ*4n,ٹ]z6H^E9[AozF$nw(tb싿̳0eW]Tfٕ]Qm 鬾0 MH<"@}@$[S3~l.ZtGGIU@y䷲z az: PW<)}qk[8/˴_hE$ _hw Ab ބ/%SۗՆv5fA_S=/qd_a3W-/ZB;(&W jQBDW p".Kh:]%0JZs]=sLr$2%励Mwi7`ޟWtB; (:L>IB;ga}vm#ֺfv.ox8=`I#e<+ZHI['aćǜj{DW ]a}VѮH wWtSJp ]%uJAtr5&GWXJp ]%UBЁ ]1A=+,) ]%\Zy PC0$ A]`3wL'tP2~'HWB(]`ݣ`0JhY*䇕HWRbe F{CW .}VUByzt$cO+#Jp ]%0pJ(vʁ ]i‰V]^s ȴ# Mn^C::q֎q%G]8MyyMcǩvrs)T; };qFIkZmHI~ŹOOU46:78y'X}^iS?޽+τ26~W#_~Hz3]n2ZK!B%e/ϪUMKg\Ӊ /YskK_#t{ӏ_ϓ7I^4V1^P~~:e%Bt p_kV~MYQhޫո( 1"DlHǍ3ԈR/4a0!Ͻ],ma.R)D]fr>Oy PF.}vjC$\.%6!!Oc*5(K)5)zqyG"ɥ?**LK7tQ4j'Qegw.FՅoQV suXy Y[҅f~@\C |̖\6 Ӗ\kOn''IJVY}]M6NDp> uˍE%Q9#;I'c,Ÿ=r0A-c*+C04ZQu7"0QnKR[J6M j~{25.8ol'30M?Ns?Tni.MST&ukBNo Nat*1orRShXXҋ~J_Yi}E۳uJzLHF>Sg ^80n^˗ﮦ8Τ β _kNkE<<,O܆!l8 Yr{Wo5N7yr*>]_at1!$\PӺud'iԇhyx6UƪȲD9QBa8j|#"^S+LSLHQ\|'%Uf(Fv @CJp8</Tjx> V>[ڤJJVdUͮVċ+BcZm/EtW`}졫+1O}Y;^VUyVZ{hr)|2*Z6~Јv5nĆ"""b(qT 0D5!Rw⤤6C>FEzt2z,5`KUox;y/䐳j.R21 YO31xYcH8o% ´Y/奄 x:7X@[yW/ /&gPdm`:SefZ>_HuwoЮPvV_No|1zZh_{ZqM28;HΌd!+4hqRw>+_ cMuUR-M5$Tt:29C-=Қ&sf"۵Qh%aR«r!hYDuTN--YCu]yzD*WL/{BZm #ٟ.Fl %;$hN({ N&3tsʿ3гfDEI|[fmv}9wծK9wwL似vYi4?Ei: *ԦrOK/u] :=Are`rY4a#A29HysQ :cYڼa* 骗ih%rH!OOP.}(A? q&WPL>YЛw^ }onٚжp)y@ۭ_t~`kZy^+R 6ʏʔSvNp*.ͥ~bJnbk2 <}dKO DY+^6^ן뺪XwӤ>Z&@ĵ r B($b1@1gz4Ji$k׺q7d<ڡ3n\.]4f{Ww[۵b?z7z\=B"{w Vɍ\ 3]1U`.fc3M V&gȪ\rMd`S%;vgt|4Q]3M:gk6(ϔ')C%B&GzlBa$JYF$PV=ҽLGC+Caj3jX$"﵌Ml5ͭ= ΖiQ7RvRj,bnXW|p^2-,F:F­T2RcM@9RcA#< 1PCR]QQ^`.0 F" T)R"l{sO- B:+_{ӓ;~/B\̧ebK&s+_wKM)؜վk?LqVbwmtvwawfqwqgfy;dt(FbKrlzX,qnfp.裋ȼ1dc! 
a < #ATj i[9Vv%^IhjA)ڜ \+^k[jlJ=?Z3cW[XV^HTmv^El7GL?Nq6yt-6G2s"d)ἲ`1hd1IWtW$֦ŖVfEPȞlJhT0*p2q;f]%XSfč+[jla4(XjcW*[mY[RsJЊ8m Ҹ%DLFk!xuc61XU!rE\4UIh8|YGL&=&f{ؒѻ4c*X,bEԕ-,bgxH& JLL9cke''\"j ,Ò"ne\FP";b@H`A&HLZ(V"VgE|*ѫdBi,.vtvkkm qN 5D w38B.C BIމ@)}cfǮ*C]Z^ 2eZne6*y"L}JN .-vfܫKSV=Y}O?Y6X8s)"@mJ,c\ nsbU,3@ ˝d̆.TcT2<|Ed .RԖk"JEeXMБldt\EHvwyw3>ru83`_4~{ߐk*|5Qw H&A&t{t*QI!DżbI33Y+%ɢpȫj0:31Lr?6s~,5Ws{2t_z5u/MO‘m6g w?HP:n~ nnTvPRo'.z;q?l[IفJNJU*UܢTJ8ۛw A!dph}6+TnQ&$`VV'\t;]`a9ϣ\%%{cB%bAE&Y;gI,7Mp_73p,xblb-\R{N_ȒveM->N7;'m+Gǎqn1E`P jD?I77*+K8HƪԱО]Ց;vwZOi8 .+އ4q1cZ4&f&EƨO Ъ,"92:sPއr(61;R?>.Jq<[[3~o]u5-+~d1P`)ၡ.e!6itc :gy"s E؋E(sy1;lRSdI!t@,O|'"mo29Q7M/4 1tf @l_Fy濾n=݊vPQ%*'/Ir%G$,tL,:T63 9t2Y1JFD-?8AQ/(Ҋ_'sG;K9Ş/k|pRLF>Ӥ[?5};}G6hrMO~k_{ʓ,\g.eo{Wv~o^@Oh>e[_y+ߟ)MThOu^;9ѽaꟽ )0 G:ᬵd/?f# 7f'pLXD:4f{돯S~R/R7eq?_ns2G~;gL?X~N/N~m wz~ޏ{(]\yXtØCRZ?8}{>igmZq;2+n?=iPfoeG,9rf/1 ?4%||wQ)w i;Ks?dd1eﯱ) + \cYJi4dH.߇IOEKAw1)*۔W܀okIɚ |/LfsioMelnB~_z?m{O_}7F+#.*pz-ſR|؟M,Sy53%j",e W[WN^']N9@{i^a`maQ) t>M+ [&zXvqSw/Bvsv͹\`hO_xɅ7=z`A't$FEL&WQj%& !3JCETgs}zd^D*Up 7;FYKRE 7E0@!{1Y^AZjNj]^vF+3Yn67l/vMʢ+£U85TOꢧ=-PۊxE6y[Q&}#-FeCzimnRF]"=ETK8j-zSdNԏ0N hW0ͤf4Hh~p9F% y $iv X=ؿoiK4) " i^Y֗#CvIp;G]sa1)y_ц{U4/.|LmCj֕gڎ2Q$fQ6YSL5:JBXMi ّXeWcQU^hl5,YƳN<V`Z9^_'"1Rl,Izs|Ȳ^q $`X*bHpDY)Y4G2ZD}3y8r}<q`FgE>Q0OpUDk*2[]ou(%2xe^3 `,:Bvc("%Sz"AAkgH |)"#I1M[8^“'c?LCQq~ގ&& Èfdt'q8/ݤ{ܥݤTz^J^HHճv]"|G z1ԓ,@]hBYPh ɷݤWqsA,om}~Ko?rw~bp~.`w{aJx6_!}}6FkF_Qc;&) 5uEFtGmxg6%H yenŭo , ;=I@|:W˓2CkI=5?GC~r2JdSf=3XHJBYӏutO xڠڈhIלgq5 vTrk,&\!I-sgZ+45,:u/N+0tMZ3щ|`kiocMz  em0b@l6IeLQa`]BK;[#p(Au;<)ng6nZL;3H J+& +y$:06D"]vnj2CT n{ ɦQ _N;rdqoR0#4'(xA=ۙ{/7ɺrxa곺p">FByhwom]gT>̵պ0b:{@R6%,1o Ue'+8R3ڭ KUTk *$BWc^UDY߭+m"`MZCW Z=UDKD*#+eukK*֬5tpVmUDYb;:q٦c,h ]E$X4EB0!q#\ښ7~=lGWCWIDJ#X{˶UD+?wQ*16\GR.%qC(E輟<nAe *0%9#_^?>Q9Ω)) F}XpN $uzgaQQT{25#oO˘Q#RF蘏I> [~ ߪM8çǯT)PEHP- <}20*y!TUpMR=x:fw5 4=ݰ`}舢MbaM"oR7,'n`7nض1fUXk C/ovʎ& .-t tꎮ(aL "oNt> tQ]]-Ik!w}KXLED*ޠ Ѧx3 nekh:% hY"JQ4^H[s>ʧNep}H9LX:"%Vߪ^Ч|u^\JKYHeq⼷S6aP9 ҟ&fhn:]JxGWGHWKb+pq8AKNҳ-8ZʼnnIbhĿ3Xt_幍vYsm3.%"KLcA 2CLP;04@T0s+SlԜb[4:DZftƘhi}w1 Lh*5tJh P1J3"\#;S;UD+I*T#+%n3 p%k ]E5DQGWHW~P.GŤ!V 9R~),_m|8fzU^z[ o{w?Ap{5`:b7 ~RDQ=vwD,t;5cTOЏ{Yu/~_%*;FRx ]jUD$8R0楪8)ZSFjI+_8>Ac"Pwb P39Q''9jzv*~Whʶk7Tm_V)ZK5[o'ɷ'UMeUkLW趘jM7"JM"푮"\ޚ߈V6~>P4׼r ҧ|,"!qonrsn@e 5, RTZd7D='> 4v_ nF);zfpŁ. 
>da wtmc)4--t*z(#+"dEt+pl ]Zy*%f+y`4^dn{pevq;2Sx:k*%I nR,{X=>arN)÷)*8]I|+>3ĵ_Ӓ\},t],}"Lw2C"4uԋ`T{c R<,%5#9>/nr-E?'块/xu^ŖeJ6[ *!XP!Lr0V͇U%BAe1n2H)Ycq\Al q 5"(E wyeYos9.^LCi;A)(EE>Ѹ|*%6!!Oc*5( H><K3g`j=0(_d*._,?.MK.Gg,^F`\_^e~)1Ei].ㅗ`)-˹iE$-_O:]r%S?TnOwo}腹wOh0,|WeXfNd#|@T,CFXn,$2SȎO,H<|E`(d.n8!7F ' `pvZץ̰Li`bFQ$_FT =*fwZSu!IU,>sYz W)2NH*NZjbi։A?tɃ4攃AS؀{ް A+ c 8%&f[)~c&,}ͬ":-nP%)%L{ 恦j'e!X h0K,4ϗPŬzHB[qmcn|:CŘ6t3 ,G0ָ˷p?>] }Wːτ|'ySEK~'go-]2Nar6ԙ@|WO颱1%rL*QVZA':kOoT*_`X?8&'}Fe&tqJ$%owbr)_[J 6*|b\t+`OK>qiz1(6qT;?[Z|v~0tSxҊpSIbq`|'>]T-;?Wʖwa&$cVg_06//.]ί_{gduQ[ĢVeWR.Gu\JrȒ!i4{jx8WSk]5]b +p 2@;8}CfYlGM8'|._.܋H=U\Y7 |!ejnrҍTEg@4p -TT؃r|ZRT̓pzzC:it-BFWjWA-yA!5k?vk',_2mT,.-[u72(/M *-">Uo׽0|09xlQ=<^ /DD>CB F1&\Le<:x ~6Afؾ`TZi=7qZȧ/cs &%򩺾 S-[09,c/(Fd2a$Qg.9gwD2VҠ0Ml>Q| .tco22֞󨯖ajLvYg*5s|ak)P<)#/RTPqY'/̰֞/Kr dw3a?{J'|"fU8|<(k,:Z%(.f4.y?גSggMcA`MKs=U|H2 ^U魟ìcMG+0c2h2Xg?;W=hv(Z稼# X0\B4c~y0*f05FZ7>}|#j`Ȩe|.*q'o[gE8cikKqS-`k U cJ#686HF_jRz\b km|bc`*2_7;ES{ې 8E%3x.aD0@Ndmhd_Mo~}GX4] yx˖ےB@9nOɭ,/ 즧7\ \0-jpSm8\mF[66 U~;x6Ӭ|[WX+ꎁl;K@BqDz9Azgk;Oeu\Rl-KZGK!`W1pV{ta2$|gkֺ-q3 mPt坝lW|FOϓzx?]jiߙz2mʘۮndG4=cPJc̘ך{&RK0*VXw1ev(IoBJ2)CV*j"Ӟ*AX>N4ix4QM˙& ʙ&ʙ-Ax<T*2ɸ[-U9̝V)XE҄#^Gd&gj۸c|TܳcW @,yHŎ{AQIY,I,f]u^t~hSkj.%Yf]7&^r9q R\\>2͜GEWT WRԷo\Zj*B&H퉭rn 7϶IWu´\˰& z=zOeV8>FurN3x߆ZzU.J/z-_䉵_ͭrEWOb@Ƿ5>yβ[>;r8.77f{>WʏاRkj}錔-ɔ[*s0HNm.ڪ ;5 mRxAbiR{*)K1er:sqyu+Z4v`ot/O:,O|~~ls#լF5מ-fP%r)SAc) s>N!ruƏ]܀MDG'ERmgbRں69ޑySQ&6P`[!ڃFU)Lд7)jRK5soAȖP""j3x6\j c'4sV_~hh{qZq<y2" Ox㥅fLL\3dK%6' Y  sW;S!hmMvr}LE#`ŷ %H|K:q'#4s#Ƿ 7]pi^T\ q1p_^H$YJqqV%TzI]Hс%j"ׂӼx(PaUau4i#տōܑeۚG_X=۾{߮>?gFdH7͖U4!e*`SlǻIՌZRMF6/us,ѐ5&J @65ӵt6ycb uVMu4 !5w Q]+2L6Ls:ח|Ofu82tsB-8_/ſ\{S3?c .~G̵&ֶj"ŮIe5^\WxnSN {bf7nЅ2yI{,^ۑ=o^Hyó)8eknEU2:ќtΏvAȄڹZm1r-߻Տ?XTzT/D泾EDms7~_s{wy:?_岊3Y޽]uEyvWs޿9F:cV՚lq7hUgRE殧;rGQթuoc-w_%yr[ZUı&/M3;%O?2;#:`WME7CNtk,}utƮK1= ,L&SS.<TuM8)F_%\׼.,wBKֆS-S/y= #kA.f˳~8^_^^n?ae${i.JZ'eBʒ"i< mX*%Sj)r*GwR_9h1ߩ'`m@*VDUq.STO=f\)f;߷~{AwiȭwY񷞷M\m{}^__~㮿?`>8VzW>|{LMo9dWpf_.>~]GΕa|zxQ˘Rs/|B+]ݿ{j+J盓vC[v/~Mlt{bűl@ObjF̍RqEd [̫)roIv N!%JR#6ZAet%SB9E-ġy6dIpxDw9CsEjnxf'뗏gH11lƽ@Y5wAgQ!F _Cݗ=bp| Y%| Cm2S3E/RTcow]q:[?%Ex/*i]5rpk`s ܿ\xm7ƞGnPT/V ӲqO__p/3vu.F)W;Jk>G;0`VWl(7{>Q7>:|F;{`vAr<4v|ѡgyc.zjL=D-"Ĕٻ*яm6rrl9;IkJ.d(NE,fj;\8sϻQߪ.O_l\lcl~{E 3QVZLm)yZGwm|wf F &[%%B܌)!&Zk{/3F01v炅7cv֐07뽖IjԵlT w`pf~m8OcKxo`<9ora\vG1 b,Us=J%9<~gb=b46I B| GfF8DOwy{zZ1Ү :xyJ9R(i5H(In! vDZ1t fͭ ΙTVF@ h,b8 cbj "cj J vQm*,˸U:2Oa C BРFLt6)p0GR`+M T5{x5=u(@O6!.8 yD^i$3U\#PH/AEjH Lc3hw! 
Hb bA }lbI i2ΌA8q]˭u;kog/M5iuJHNo( RJMƱ^Nv&$_ =dya_8d7Zt"¾Vwm>1IxA[}mqqK0uJjдU@u1'$ Ʋ@|TcIEՒAHE!2*!:EA>X.-Kg4zOxHP$ m>zī!ۀ8Jï U'S,TGwAkt-Wwlq ܶAM!'1*g7%cxח/q'س+ +xm]X8;K+$^z1U@@"Q%_Jx߹C׀R"T2=S[M3-h٘+y BnAz>^AJ-`K m1T Jo% 14~h?@d,ʀGf{Y6uLdPZ\G=@+ZGhwțED]kBy8Xl7@>m~Vt1'e5)*G nctb:4T#yg Pf!mT+u[5 WTh,e"俩rV5U桾ǹ.oG x1ĵU FseXs!]+Tޯ}9i?>_/B)68~M;;AJ=QDȨIؤmi0PMnΘƒćSaBu_\*H$1HeMν\7[%}w &/n1襾6a^qs|//x0zA"Z@i-VT1$1s(tNJy<ɛWm=гCdR%5dd*lL2Rm歓kC=b Q!"ȣt@';p<㋟>ӳҌn2/9Pkbgˎo,7/w~f~L-fmAMA݉-6/nB|i  |_8Wi *L+Y/A8zȺӊjWRD$UJf6;;Ow,!_ JqM{4^rSf;pѢr=H5ɾ @ jٍ=;=46W; !Ʃiŭ^@V݋wgOΎ~bOoRVx/GPG^;7t<7mogouʁ9+/~Y鄽x;ofD=5Z;m7'ǛSxpQztj~|te/]L:qs<"㓣7Grtf7rO1DC~p6x@ylzF(^B)^Joߜ]\˷g4G/_{-VonOZ;7Դ4:T6;qrK!|wfs'S\I_B30?nۛߚ辅^݈r\QaT:%6M`k۷{yg}sV7Z>fz}4ɘGs>~wԹX\,^M 7a9|&]FpBև?=ANkm{0{hoãm7Sݫ;mn"#CkVQ2=* Oj)V~cZ>rL )hիG\]{ ǯ^sElޝO 9sW +w-(?^n7yQA,MUҟ-;.k\+ E){\2nWmU1 N6nt$uIBee^:w"Cۡۡ~;tM?}bҲU_z} ]'^n 'U_g'BI- }RuQ[L^/ɣ0k/ǣxmx< ^Kmu 1 ?:-\tys 0"hDc݇렲bͮ4Wi7WԚ+kbYـ3%bW0FɠI4ף ehihc-~BK>_>|_@6p=1PJFCJFz<LSMe޶3uޓ=|N_j>jun3-SMq) iߙx6]ku~Q6DJ.go} ȡ#\ km6-\Sg>@1 }->9{W|ms٤}9a$;n6-J{ <|@_|sFm86o<4U."}3_\*Lم<@:ʣƤna6֢j룕HCEQF@- Ajޚ:pbL#z5!L- RڭeC>ɬ6* JUYݛVjByAmsȵ6' ML,I}EF_;2ekk\bCkYB5OlKwwz>A$M cIAԵ:d u3JDjy;g!H̸C-+ohur \Tf>nhdpwmKM-3F ^{lPۨd|p8v֡nH՘s YE9 )#qH9\~gr9Bqz@79{7C{)X'M 1KEWdL1S})IBcyǬL#DXDdsۆmÌrXɉMO.%r9)-w8B™1QQ>cWHKr,jbbJJBjg 2 Cs^=#9%VL.O6Zh]Q 083L&qq>G;xԊX쁘5E&o1jš*P uTV@.j 3P/+Ij5oK)Z/N2 xva4U:<><4# hd0F`yU56J|Xځ9QVFqlƗ(Nse] q6f:2FKJ=ɗ6 *H U`PBQZ6g#akV j# ͖@nTgxY%/@E݌ X3ɠHWLŝl0AYp%ِQ ϢA vQr(g j(XW0Mm*Xf4724i1lge^O> 5b6L'ߦT +w C@zKap,T`TdnH[ZpVxbG60a\:?%!IH!-pPRH 3 9*7#l22lb7ZT g%c(Q(ʨqB Ңva=.RH6YgPP4XQS2yު{u_Wۀ@Wvi<7y3\OMPHoc,E$$-jqF-Qh+HbۓAvj|Vc z1V$ m69&*vڈ<X+fC}SȀ( '#J0&d!n \D<7z2|lYt@--mF.@-$3x:p</[0MW%9놴J'hԶɓ#]DMHR  &ZcVHTDMk fC! qGh+,q]/5qUǠٲ֋< Tn:,T]ZɂNu~T}g:d rL.j.Ib; >+ 7ۖ\tÔrN*f.2xhDv(cqw`6q-[,OhC*~%_:JH0RĠ"Tu`\3?{WƑ@v2Rȇ['^M.6~6Er9d;!)RȑEZ6̰zyGtmg! GKb 9o̵sz"=5)!t&Jb" ID HK5WX=5N)8}3(SŊ qMF)@hS))<϶i nCm 8!`z`+H~ ۡ@y{xt{vHZ-HuVJ4ǥƢ`MvGO4Yޤd>x"6v!zSHKy0Gnk% ᝈFi*k \5za"jd {LE+a4%){zRV6^'R-؅a?olϚYm~,n|@,(b{(, %|Y,PJ3+d:g[f9xB_} }/3V3/ULN47sV<gm@_BixQ7Eq5hLN?VRqn];#޼Ny\v'kN/֔mx?Mۦ ܘ?l-/Ygw|]h\(11&2-S2dw +DpWaFAEo3uڒr Sg5Xf2!3T 2A&3d d2Lf 2A&3d d2Lf 2A&3d d2Lf 2A&3d d2Lf 2A&3d d2Lf 2A&3d d2Kf@-U׸o98i;͘"wE 'SF^Vc{Uba ZK޷KK~'lߠOdiv䜵ȷ*I @ad`AKv ")J%xj]'uXͺ3q6G~/|Z2{Tzf.ң$=0 jR w~#խ[_q|wN*3Lp4k0*y;ct`2xTWȎPW:gC0C$khaZ* Ǡ.J)EҠ(( 9z8Ӕ޶VKʭYei퓎Y#ucTƨŽ(,ZQ3)#ϬL@Jultl:  W/)_<  YB5AiŎmJ-DRmJ!`MAZe۔ZX6} ܦ8%fbL 21P&@(ebL 21P&@(ebL 21P&@(ebL 21P&@(ebL 21P&@(ebL 21P&@_/sXge~Jq5yu0.i\@^t@t|KK|g%%@ hMz6YKh!̆/S7fYʿ݊hDsVn:Py Q.FLT`VavL6x\*/ߘ*t ]|W3qV߷B{-'[e +s4YFNYx_;[ݻNZ=ZDqn|yR0Nz)~T$`ݨWSq\n~kD$M0_a݂Te`@ {)y<3Aߟz!GjqP7qnCnj7"23ΨRRd)/-#"ZnAkDZ)+$D(ъw2iee5r'*]Z; ZBw,>El{/ ".q72lf[#dXMP d̳yv2N<;g'd̳yv2N<;g'd̳yv2N<;g'd̳yv2N<;g'd̳yv2N<;g'd̳yv2N<;gaj=,GS5{W>T%Rr, wH)Ӽ3w"ƃ 6k/RA96I.&\^Y$gtwDKIk!K'H?@^9@^@^@^@>i J ײ"$ dPd1 .LLpøD ]y ?W'9j}?w:{v;2w8 p_>H/WCw1w~dP)V?DLxRy(5JDy(%9~5> sKy8(&bPZK\!xds͕QBKy@ 6DBq|@iwsRL͕eF.v ]qe fp~ZY ~wCb |-a4!OВrcՄJHvIt4˽ sWx dzЌ`bn =w>֓^OE Jhc,: Ӽ#0KtFz! 
-uSR R)憀GQNƣ~}B.:͌.ޒ֟x曢_s0;\\#Lֶ"%ܗYUZI\B4J<]veKoˊa*`^',/kȳ,fvo'F-if5RpK(1diGiJEr(>-(ײH˨H\7s$w葜p1Qri6WOת5WOQ2iR=3W JesSV#d@`+WC1W(TnPʥ\sŴ4B=sZ\컹B)l^Bo?:r#*3҃p )n ˸{m'oEwYVQ8*:[/"VA&xZiTkCW}z }%şJ;.ݧY>e~]Nώex9`56nj']'UOē>;2a҇gOC9EiQR|4FKh]s"m8Ğs6lbGTWʹӻًQhx p2  Д&)BaZ p-ņN.; ;0YEc2lJʋB~{~u:8̟[[5;de7GiA!cW!7Zp^EGibM$.D/@**#Z-³X\˳e C6 /`<1b]%hx`ś**u)!N բ*(tH0KmԚ3 ^ S)%֓0t&'>lV[[Nu |x\U1[Zc;͠Kw tjpҟ5:ǽr% ,_ʘXe~gwB+%tda I`u6tcxE̦dYC?UAJp ) 0>2d2X%B#-O'uϗɕ Ze8N^rsSHl|+ \*AxQ iFwZ*~u-ZtvS DŽzSud ?AȎ.8*졇MjGHA6O74 >Xe\j@KsD:sy`~LPZ [V\B,eػi4/w?ΒT~E|ų4.59t?z>?ݨ`A|4D`84)L `nFp2 >49cFp(YKh6uotGPJXS!黿p7A^MIP65ߋ4N4Q]]bşX8XK*+[ HiT%!z~Lz⻫qbVZf֌=mpy oˉX|b|05D: 6]Nǿ cXL.\4tztr8(I4z4+SOV^-ǟdm(dM6@+ yXzۤ i85(:~P_qP>Z\SöisVk0J؃dpڙ*dZ4LnMF Jە8/=UkKcAE~Ejɻ4ht['^ۢYuc|뷼4`F͛mכ4?hߟc{n8X}nЍޡwz>a7N7-"ɋE 5Z!oa%.n햆ĴlI"55ʹ{kme6j{x5ϼЦH)p;FЖ.F|Yvhx嶗zԺ [ڕ dC%mmn&ܴts: y5OS^ zelgavָX#챼 ]:<6i3{3Wog e Ǘ-6Z .S7MYj}ZMO^;ݞ/]emg#7gPΗx4 U|W)HXxV`IƐTKxHXPN)|9,ET})It q z !t"&.u*(*,{B2 "VIAQy6z=\:/_g粝;dseBľ)>%ƟO~۪хˋMszw7dCOcnxn(!C2L>RjƘ}*G}7H`"7?{Ƒ vq;N[qwp.Hp"ٽ6k]SLR}!)Q)jJxđfzfUzAt,oըl/M!D{ClHqt.C꾥kb/ZꧏNw l4#Dͤ:HiE r$̣be]!X|,#_sonV:cxV;A3Yx)֞`pJȘs0PGZ( RZ_\dK lxuu>+O>1󞆼^uo걞詻Aw}"UyU-bLM?kM lQh lq{r> 4U=r=jzd5F7gُ~*|z/8 pm*_ӍU˽tܛ)n#Pb;u8:(r 0F 8ixJ=SgLQ+LD5/瞯+SQjn4n D*?~_ʄO4HͯN*-[b I7 Iw# "|jlG|yEg0iuA3dR s̃Ж68U\H}ΧȲX;*t,p9Y 'm9w6͆Y+@gG̼)`[qr&-k~>DS)!f+qiOmC)"KYiюFB%$LҚ ̴3MqfCL i!8!̀Kp@\0rUj唍*t 9A6E8}UMdBX*tK\gV#>9['t|g3LQ[$,}w?6T5gU@;=6j]>^V2YM(,)Hsq;=$c=7&2;{x%s]wZƳ&Dex0ZRxyR&R2׭?Xv+#\ GIpHY)Y44Ȭ2ZD…)ެp`Yqϴׂ %z[,-UOlnly*ӽ|E4:F,Xt&xg%o*YS %eIJLLB62T#NHCS)V6V < `>⑦$k0CHRc[sؤ_[ta Vլo0p} $Kta D>w2Pܛڅ?LǜU c!i‰0!hh !fr~<#PBUHbܕ~0>94U`́IY׎8=_4EتD𘌠 4–@/ϰAa%=db8x/kAϝo3=gkBFHqJYx.fr ]-1QF\LpX-ӹ3mΎ/Sp>DžOoA77ܾ 牠%s߾- n)"[HWaN}yEݱfiE|i(JVT#_aqXjWq\O ⋖2SJ_a $[>k rxQ^i BLʆ} oBw.nmhF OoFS#lXJ~oڧ_u j/O'&juwtަgɼGuIm.X_Jݴ +)|rDqqz1[z>Fry6ZO#J!lN7\aeMi n}&' ԕ5(k_ixGG۱F'=g49JB:4#|ʱ;R)e-w)ET9xDǕJ\degNzѐm*ȹN'' 0AL!r9s9 `!YN}u'pއ'I1pjybv!l%Nb"rBjiD_DfCeFF݌b͌hRt3|V:ٷ\`b|zM,+]݆3ZV9XcwjkcL.Rn?裋,1- +vy@`'@w* q[Yi1K.HP fhsVR4ăszP[2V#հJ5YXme]ƏG=0h@9)ђɒSye (#bY_LimJlieV4 EQ0Y̊"6`At;f]%X9H7}xZ%6N']mu+Ue= ]+y)ۍ9ZEP"s*qK.RU .GlHūc*k>\E1ZHW$X0pR0IF1HWȹ[JpA}ǾQWzDܵ!$Jk̤1P8fV&* >zRC+ށ@ BAur (ҍVlJ"g0b6d!&-H\v]5r'I.Λ%sYm+Meh8ŝǵ6#$ Da.(}aF!h$D5Cžjc_y ^a[2i}/YGi |ϸ~pY[s;L*bFlU?+%FlQZa7clCjNW\Tm9КaI@eL2` LB6^zS0Yd" g0u!N;kiڧw`'O2-o|Qz!UdMIrfޠPe &E;;S*+LiNqm4`COp E_˕W:Px$<1B2R(AA<q_ʏ]Y:NŸ4ȓ3뽼JA*cQi{\S d Q\j&QGjMPa?_* {d#&!Б{M0MZJ.Ȕ0IM,Huiâ *Ht'<`5'$ Jw$xB:*%4xaQh}"^78ŷlmoF~^y#?Pj<:k/HKAPR>S^,N^_Gv~ګ;3b }}EǵOl[ >o/!moi%sZu9Ξ|tԊ~t>{^-OTlhh쪮ڮz֥6\Ә&J^C}ct\̧1e1^VȦ5Y?Mϧ4- viҞt[)D7fe2%Zܐ-RsNŏDiX}"e>-_2K'2',^ޥwɱ%DOtsΌۮxY|}JNk'}>DYlzGӏi.I[m;r%'lóovIlV,d6[)EL)AL&}6fG&oD=t!O}4<>\:#pdJd.0Ab)j{&t"DpTi &tbGsGN^oªiJ,u<0fuíӢ+xI4 \fi9 S(C^`OϨ2].NFv[./.SoK{W?{ u//ZLHrhsqϳJo(cP![FV-9صՒseo2řN9*X"&.Rp$!Xf42,Mȁ 1*HE ZYAE)4(-%*CHP+ EP\5r>{$8+m׿Fp6_oֈΝ;֖L.5>{ds~,Қc02Rٻ6$*uSV^fvv6<"-)RMJ-OUKI%ʀmʊ<"8p^*F:9 ƅ5JB:Cϼ0o޾%ߎV06$ ^mCs\Ck;m߼~cI@ VJEU]N/F=)BJI*g@UZIk^9~K _w0CZZ4x.3GG(! A*V!W.㩼]ȵX*oW.TrW~5 ?W񌦹,Nn|< q5J߾]v;|fRlT!g)AKDZKp+-<2TgS!N9 ˧>n-՞(`)L,^6Hc ia1n+r: #_`ڿR˃^e\gNC y\6>/.|ZE)(L-ͣ.-(7N@Y}ݦg`%8H#' IP8 ZAI@sKWxuܕ|@"jkջ8&|e\nB k-g]fI[ _*qLPʔkJ'0 dâ|Uo(=9 tR2B9ą %M/Qr# )ˌ+cmccSL^^u. m-Ơ>w[ g =́ ,6/.KEت:0&^E+aGg >eTpFwx1zʒs%g+K)ଗ>%RtSʂ⹘K ÂspA ZP]O uj \ftPJ&tɊ묲V `1 F&sו ;#g7))< 3mցW3ڙ{ْeF}eO2rx*kIF>{2G1txvR/HQJF4Iwu:_qP@x;hlkc:}u>hdq'QI䩔Q ͅ H9QBvPvҫL(HZI}ʶdߛUgXo}gHoOۺx| &/AݝCZ._}dt3EbX]L(d $IZ3# Tp>p 3L}8!5щY0ơܥل+lȤe|gK oGNo"tAHHޅ"+I1쌜8ˇ.g=I_SI>mEoe==>k%o@UUALW_ >y"NcF`Črфj'*Tjnj˜Q®駉e#\P0\6R+.I//vMصgJP9$!ՅWDtVŒj$he#=#uH5Vp}w 9*'<`4FR x Dub=~祆PQ[o?V|Z2LbG ? 
~ke(6j6x0'mwof8liLO&hTiL[fi͋Ex!sg>44m&{mRsAD0HAyڎjg>r,HzǗyT$dԓeICiLS< n} //"4$JJ*坮O9 S@$bsk1xgMc@5I)nwnS6Kys'HTCr :qϳ hbp.ifLQlur! }o0.s+%jjp03 pyu',bfvdY9hm^ii}2E ;pjqQ8WR9*pէYuAFe ʕUQwno\ GҒYb`LR1q%x:g*)!SZwwZנ7u$Q,7>czח߷.tjCwB=N3=;'O *ULLX n}XEKjzhJ-ڃKs@ADMP0x!b Ja1'Hd#jȢZSP%O!8rYJdN@A&}lggUdRbe LO68N$(g.gXe,fv[(3NBM)~]DVz)GTĈgJ{H_!i.;<  Η yjӤ|bF%d*%vTVdUxQqH'ךбv&nEMB:iVybuoL>3Bn).:Y̾1ZセUiO.~ߟcB6~x}@"ZEse˘#[[K>5G-QT$d1d0RȨHb޳VƔUAГBc^a&gp:EE:8G K3qZV./vX&䀅2~Hˋpw8<,9rU< O!;ND3b,Yց*x=48(`[ĖFfPȞI6AWQgv,&2rH7WFl?5y.cԆ vk"]ɜ2U<qZ\He:ȐIB9:4NeCۤJ r(D1I#"& ;)(2SIcnjl&cvc`V\dB &-@K9uuٍ_>$z՞pq^5uNvɾ:E5​[?ך։s2Pu t6 >{u:F#JrVD䀋svǾx;CnoRW!Vd397NّI [ я'~€tH aN&f4C K<'W!=nX\=g1cn?uwBnm> Sݣre{-SY?k P/2T4j9z8'ތx_OFV4r;~mo!+-f^}?_VWofϷJ:;&n\-\[j7[sжTwո;Ɠmp%ȕXKyOϳi2q_LK*řdqQyRۋ>yeSYvĽpf/b&CB.79$L1dٺT!FЉhAE@ (Ry=GH1HpD3qvSrM?o0&U6ny~7ӱ;/C1mSJw]2iSdLym}kh1@;ᠼ(oc=f55Pl}5AG$ng0+N8oNo4%:7׼7RBRkW2hWASyϊE.%R|n3 g%KxBJt<>tC}3c,qJ̤L&):Խdj I-Æ!)tN7W1-9ǫjW/:ga^mnVNxph_#.~_X7Nҫ踲 fA0XB4jkx"6dc\ Pr`1[[IHF[%#)p< `>$KWSbZ8ZGZ'~@~PJ?ɒZDL/t6KT15[!JYIŘ!SY?>{}Y^ְD=g@(ekZlٶGC1v (kfww Jah/dGR ^tɫy*28'в쳂^)IVrCCI&<R%|96a|ua[x胛2D0џ'8ehFɝ|MϏ/ >ܟ[_ҤPf\/÷{sC2+w OpwzjzFǺO?߻iq [ [C??Q,})qfwod(ξE@Pэӫ:j.o1vDhm3QE[| N-*Ѧi5ir};yթ^:ʵg\ӕrlWL~馦݊6 "GVӜGG͝ʾYNw ]GC~˦hfn?ӫ%+3r`?p [ X]O.Tp`#莳Z _nC!ceŃ;:\( ׇ@̛yiSxF_i"N3)DN2D so1*' J'Ci,E9H~ƥ>?!b:A Wm\.[!=c`4J$DTP>e,ZI0dtDI:Ciq}u.}f;'Swh=V =T' 9'ʙrbu5vTi V T_IwGRH\% "4&W)  G-yQK<<꽞؁biK-p*q:xLA"# ,{#<(ِE[IDU3~xqY52pR" ,T#K/K; q員HmB@` ]Z]9YUc՝2{ANUEW+n}xv2ppW 5z7ОkL ü B_)1eօ٣1:;PaU#,?kv)ɞU {q~6Y#ޱXa/oݲ.q{ qKL@B)^` >ET68#IϷÓ6Fn͠(̉(ZZ{ BX@3`Xd\ LZr10'i{8$:S14ٜX % g-E6\9miN}Ӊ{1>lb,} JomTDlT9+7ctg7XVL;|W Ŗ^+`1>~xg)Ͷ[76ut:ȰI<_R12tV./sFgL /~G@xU:ӣ'YijoO,*🦴?\rdgi uOxVڕ#iRG %SLduOֳ#΍kcۥm=A9hHijEf]{l%|7凕XY#: jlhv]oGM>}jh9UrE;d߷t?4Z~ս&Ц2ވ;:n}^nD-g0W ="0UWA_H{pU0 +#uEk\qڑ9\i=w") \@qsk v_.Kmf+WrDb/L}1|-fg~h6],FzS;Xq^ZV)JZ*`~º<*I{*L I\zY}"s"%$E4*\#YGs7~96lI|Mj鸬9h:IoqZ}ެUgV9"~T0x.&2)|B(P-fWu,O.ъb$/ p2!hUu$l1?/Jgb4Fw\ew0'0xU*V@y[-w[j͛^URJ_.檤1Ozff7!7/F/!\7pU5+spťepJH`W;gGI;\pJ2=+0*J*Ҟ?\)׎^\x".UV=$)^ \!r@\ \xԐ'UTC%•BOv \qmoE~Ur Tz9pr{W$8§̮ઈ/pU5ኤk^\:߿GpEۓ'?F\ɡ7 $dኤ^\9e.,OTLӲrV׌H(qc#֍JKy6Eۯ+ OnU~ݦlOE$qO]q "%4 嫷p8km9 0-,v7 cȒW{WMJ4ы"W`l6XY gޢͩ,Rנ+թCw0ݡUB{EZ(kG=]]]t% S;CW ]+@ˈn;]%m(U*(]`hg*%qZ&NW =]] ]1e+,5\=3`B+[vPꞮ. Bj*U+th1fm.RT+,( ]%VL(J!$0]%;kWW"Jh_z* +(Ct3vg*tP %ŪxM䃌!CW0w~^R\$;s@KR]VТ4hZj׏rw5Ț_uq8BHh}Z%gjU[-F9ȼ&JܮZ8:ޓ+)CQ])mUBZOW %=]] ]'XuǺJp ]Zq*$ +Ct)I.kխ+@po]]"]1U]`}:k¥+trvJ(eOWHW Ny芧/Yg*+tОpz(%=]] ] B]`Fv8 홾R;HWp%Q kg0eqZNW =]] ])]`FԞ hYtut)zέW.32vJ("Ts?cC®8RL+!w~W\`IZwԾ|Y heҤIq|@+D)mAd- 9 SMijegڢRk NzLaXUxW*խ+@ڶEի5*3t儵J[WHWરlMU+PW*դt(E%yOWCW,O!wLp ]%`BQOWHW.s!J ]\:c]%J"{@0+( ]%tZ.NW eNuJr.]%¢+tв/'=]]$])R+ ]%g>Z[OW %JSʕle60ΐ`jE,(O\,_xgfw_߻w`@`0OvnA6^.e(6/|SKӟ҈|y~w+u {=omVVty7kJgJ}FrJ;} -}Vpt&?~718L@?}gd ?{3~خMgXpǫ0OtH~Pbq7^_c"ddN7SEZ}cu{8đV/ n@lxZbp>0o)͖MϘS,g#s= ,M=62V+ˌ/L 3]6\XN {SFXx<+O*2D8cKrs'AU2"´҄nz_hxLG eZ30{-#ChFs+%byf8-_/X,?W}f͞¦OO2}fa1܊η]0έ(R`g܈fͰJVvX5B]a"0._Up<<'*KHHJF0rQs \X(Aj,w6ơF50$5A 8$PNby+%RaEm aE Ҋy642  /ϝ6UdF\@7j ؔǻh$j}6|ä |VU)9ZWx-g?xex2D)[jdu Qz\ X8cQ$ 2m>ZҊPG POp).}ԖU30ksU$T. 
s!EDڶX&z;f lǹ[;(x0s8l`^Hhz^' mY-w2 r\4TȠgj 8VզƤT^ yŽ۵*Z+eOD;i 71ƍQjm: d4V2F{^| ^l 6&li|V~|n9j`'nVl\< z}UܳG^^<~<3ѐEEyD\d#92\!9G?U#Q]oj}iУD`=JN3G$L pk-eL;%CQCO!E!fnj&S;aO ~J\a Nip"6@P#JqzxH& B{ًۨu+¥&`5)qZ5(K)µYzqXi"ѩ[,_y&$#=_$رi}4Oi1$;eFIW3b#|@T/-2J"#xR0#"N Y?MZY /)ٴ ff1^,@aIE$KaQiƝVc  (B' V@~P|Μx+놲U$UV(|tYz+R :!`rD-P$}ii,2ZSQAj,5 p0Өް A+ s 8%&FέYE:eaQ)":-nP%)%L{ 恦j'e!X h0 /-?_*kyNxpJ>˃ćOoP~@߬8fч <5Nq˧RoԸ,nWOc~<3hLgoW)+3?}7y| 7G#ȜO>4;Oij  a2}xS;t\s T6?<˪ C`:Sz$yb<ǎ~ruwfӇ*1(_(Tjk)_; .aI+_<$а4X_Յ鋋޻13P`߮rg5΀>s_:ݜE~[s3)NZ| RӲFk!z<Z;Of@f]>~al@NٗvaXaԴ&2nw,@;,Njڲ)7E.gZ:P |H´zD߯:Ǖu0KW bY-!+ȭi&pPNhӪwKH<^WR6i$BD1V0^M _ּ/XS ϻH'Y+P1e}4E3׮mΎ-ڈ Mq+>/y3p /DD>"EĆQ⨰A8akRw⤤6c{Ѥ\{~kG?wYL5^b)FDd,wԧ<,rA$RM054]k>mJ&N B >y;Ngj}4:M)hY ""/DDŲ#ߧQ]eΧm]]ȕ5*Cz Y@SJx-`(l+\;w0Pqx?*߆z*?P7ďpI{K\7]حBlk~`zm2E.DY,-,xGI4 GvOs=NM!ٻƍ,Wd'lVh,0=`v3AOtۖI`ޢ([mJ"-6#UźέrrKF58-8/EWmc_ -U)qD@SY 8Cg65{{ Q6!"c-L 26<.؅moʾHnz3 S=79Zn9O4U2w ŗVlH .'ɿAkz:HJ%p /25-cywL,f JR^kfe4LLyq-}ԥ:6"jus$qYt1Kio4ѼPa=xӥ3kީ97 J)#9}0@R\*ϸ;ީNDAxuH9WDH5K2r9}P *=c9T"I"&N &pT(fVs/;u/z兜$}z=X q-rEG54{0,rHR^+H0A䨠)(#h̓f-TW-RC~+~*enq^qxy,-*Q^Q]?9blSm|ߞ$ԅޢ Nfk_n<M>bb?e?Ŏ6KU*h2<*g1 d'7$ʾhW vrj[^?S%BWWlN Buϫ:۽Y]I#F꾒ll' ZV9ۼΚ+r>,PIZSdM|{F|rPS*uOy̜QgBWސ}_6߮v=mlUnU  b  ߻Pκj-p l oܱHuYeU)-J6픷n14 f#~ٲ\欉Gϝf7\L,D{Sp櫯L_xɼ  M ,Eوh*EP"p),e0Tw-q|5tրs1\ .&o[!B&rU^2$ "sfN`)Yyz^>8~&4X&[ozχߋT4rhhDɘFIʐ6Zύ$11IPDBTO5Zks0P@xj5&f6BmrOq+_i$1܁4?0ԇm  K?:j %Q!߱:/ѴbH/nokv@"zIQHCN= eRs<Fe Y`#,\@R#5@2o X{CgR GO1D1)S q_'/Sm^x& $&! $њ)z]Lr:'خxjό s8&8 TFFkMII&YfEʦՎFJo<ѨD2ē 'ZYaĠxjg6&69f+\n|zFg1oഐvẐ 87UUW_}OU kr 4W̜hNW^NKc$62q*Tʧnι4v$|3֑<"G"yf NMhȨeZ e%hﭱR8FY$V蓊<eK_$`% bl9@I 1jM$8h$ cXl٘8yWδÄ7Q4KrMCu3.rwQ1ևϗnʿ%" ~f4P FE4"zpN8 IR9 ԙx ]rK$1"'uS~56^8X\F"Y `h Ѱ4&ΖN)H)@*͙+B 2f(Q+a)vQDjDμ`ڤ)H6?ܺ{b3ٌՒdkT')CkNq14$ka1$ܨ@zUMre{k-2ucT:Daߟ14;f\.\@^ZcsEkGG6d]YŶ͠~;@ہ@4GBajJVR<,YJ<,GPBtx,Lڨ,-жUrZע#+w p:WY\WYZm,=\!\IN(\IM),],-UUtg BWYZWYJѳc+ДU>xj^] UHqLQ跟 GӳTi3Q:=[+*?ۼ_Oo~\qs̳bz29O)9Kr^%]Bˋ˥|f."6U%M{W78Ou%|{K}pTh5eǤ8ZqO%C>IqENNO|V%G1<ew b{zZTv:U8ﳟYtK[HO\Vԃ͜Z6}Ëq͹YVg~Ǵ+Fրy~FDg*1R$O?prA~S؍˪x3EU_X0 7P=R+.TI]3=ulR՞ P=yϠz;SҀCw,.́]R@^ o[~z%0ޕy2Pr.bpW~z_UXkhg*K[Wy8݂MG l4 \qɡ'-JIJPD {MUw.ς,-k=\e)JRKp>xlj h;\|Գc+ECpj*B*K{vupV; Bq5],l=\e){:N2DQiep)/bDp≖9?ui,e ޒ7E5</F91᫜NޕXQ8͂tRٻFn%W~Yܶ~vI.`d_nfs,9lsVKj=Zj-K{xnv:Cʚ L,Fx^+X⡦w^ P䡶]~a8V"{5Z3:U>n8y)>0wR~#o:~{DJ_; cm~8Aߤ:xƒ\.|*t|0)d‚\α.꘴=Uл<,-")u8)tikrky LqY̘C:,Ncʠ 0~ҋiCd[Ƣc\gd]mcȃBu=F/Jc[I{ɱw gZr9AfxVV*Bs.R_X6cy`Udӥ a%D$a`㎜^1d֠dϧ|Aӕ1c:h)^ﮫ|G}x)$hst)zN|uO59E|ҿՠ,JZy+e SE[Q:4r] m'|2 ̙+7l+m;(MT)tAo.p& ^}-~-(8[y3ґnzϒΠu:Ѐx[spy-dG>se4JW4f4P@ )`iLQSoՀ{"_nRd\/dLAs9 #S؁5R6PgO'4BHHvy=R^f|P^G9l|zdhdRe|Mfh9cǚםͳr EdeMŠ9ԥ$-e{8-T})Rew)+ W1A#UQn.8;vS H v}U y@`gyZ9KdV~F+?8\k90:9DxhJQG{L4gݴ\iYV+D$ҧKa^ ub&(^Zjϋɱ=kܡM)D\\%%J*u)p$XpP%YWI @]~iKpR^tp .@])h{+{ii c)(L.'W-C*iIZ-J\\qW.R*i+ٹhɨn •`J0t0W "\IW~[|\%m:w# ER".@a/b֮*iZzpČ 0ΰH4\wu JU4{su{)l1$u1N}0d\8 }nS7μdsy\+-]VziR%r@k Q4cZHe3T1͚!H~y/8~K[̟iI̽1v< 8KWIᓟK]zjs?m;wJZpv "B v9URWKW%F7W<8g0:lm]'TY:&>zZyYH*r0@<ɷ]/wojp9f,ҽ;V g F1-̠@7<]o|=3ug||?4~?ҏ9L!2 ,%&X!~ {9o8?-(M~S%Xg~Cφx? @ *I."@E)/\Uġ)ut6+Z%lSksC`(WO?%Q/\_눢QV4L[Μb`(q ABPCCrBRvQ- ѹ?SV[ `̇롻ʠu҃w Kp3"ا1LPKI `Lb"XZYh6,+S*ImeZ?t\ADhh־7bţ;,A&EnQGMtBw¿N#Aq]+?tra^2‹,qP[d<5|=|fo2 =^ Ǥ5*|=X_Km֌~4=_ʯ A{JxA;[$#TtYm%@\} ;~y c,4y`ݾdk!0_7D?m@#7طItd3t(YsgļֆL!뫍B@e% 9I' ;܍0W7Z2z #[IФ?.KӋOs=7Na~f>OaysJt0lwwNߘ3=TFدX|?AɄVh .f% U\JXҖs_ KZ Ԯŕ0)G[9 (ej]. ?Tݭ&跹NYclS1 =\;NX"\! VR1PN*-~R6G rH# be}0z)#"b1(pHBcAީhˀ}z%; 0s_ߍz AǦZw|f'iK.?zr{IME1dX( P0q ,AN+E <7:<3Z`(3*h2:Z yQ9"p'A2 =7Y`^xs_5=I ~`Ms& /4ҚݖofHɚX! 
%o'jYhSs hie:E,^ӿ&#(>JgF{jee>F5ؠ 4x T0fbsQYCW:;e&#d: S띱Vc&ye4zl"hnD4lw6NM8Wqvt2z6ۂnlo`F=m7=sxjgŖ8,o4,){2\L;Dp}ûI.jJwܪx辄I.3.:IWb6O:d:ySlzxxyL߳k3)3Hmջ&yxdQ ɥC"fo/,\闆g!|&}[D,vuYgIwMuŹۉ˖Ӽ$w0ܽ Nvp):TZn~"nN,l |wAƕeNF]I'4<03mC@݃23ŃԽulsYZbijfnLX4Ll}<BZFk (tV(ôBPGJO33MJi!^Z;E9;eDGXMB4z>pc|ƎhyAdOJ̋րq2Cy0X;kY"  hn/A_1XM1[hpH.2fv=[J6d/5iETk_ c->K%1uAJƱh.@ RɹN+cP xUi0mmsƫw;k隁O>u~ 0}/B:8(ia <Dѧ·0ȃ&~=fQ+!'j]OjWu+ZKStޯdjdAq͆0Z \+ou2~Syidt?xF$WSo싿y~ЛS*=  -a#tFK  RVPSS('lFhBbE,c09ðNl&wDJwc}ci^.ޯy*׏쳀R;{AcȬBLgZ(KLˊ] B&`ﷲ ?j\`dTI(`\*#xG)?7?jHҼ_1RI41IE,Jg-#2v[qg][`#Ę&eR,_ (JP#9_UWW1" @0,6G<Q~^ID6mNVGûSx:p1 '%8b`>zė^^AJʒHl7` mPrFȱ. 󞎼ު~չNo;n]v%uw/3uŐ%LCdNHTB.qj>&Rp.R0=JW,0K$5eIQ#Z9z jcIcfnс:(D'WkG:w/ݴ_8~M'/E[U smj`nYv,1GZק鞋x(z_4ԕ 2]{#Zr$I/Os0/=TaTS~kƅa Yad,E+k]kr)=ks5|O(}q3Uۆ X\Kfj[Lo79][|^*8*Z / p4҂Ȕt٧J 6vMrq:>>}z3wLVorpTrjǚ4d,*KKXiv搀u*^eUޡʪ^0!qy0@8Єey:9R:ɒI<4;85Yxg/Vpւ 0"8)Q ;/*7,>15jPSנ'A )d`.F2tUʀ⹸˚2o-"hh).k  i K[YecB#v8}kS7Pb~ikl<=F`cw3}3AӭC8%#귫ۀ:ɊLP)OxY穿LTHb}CUz^+]ϕZϕ2jdq+xA"pN-jV:ICWK0P:+Ԧxj5U$=nuu9!3AL t8^7h37u["T]ROrI;f(Ԥ>ϡp8\zP8Swg&DZ Vx^&8ѫͯ-mA@UQWPh\ee"[AZ[m*~+(ed a$&|I`tv0-ӌQJ2aA6RwVp2,Ot4<(7sAgYY 11imi.W3vل@3;S*+ L_!=:w5mߌYHS&y8FdLecYKlɑ $-HBPyl qju4dم>_6oT1V'w4tTrR+.I./s !:Jq_ "w@0MJ6Ȕ40DNM, vi:2g\m",$UIr X`8]{SPwXg)_3c/6,pZVAq6h~`V|yy)i|]&/''oOJqs%,[c%bEթ5 =ө )_z;Oˋ8n%YdqS4"ukXv>\4tFSBiuauVߚ+ˋ75E;sS~߼EZk#{}VRanjwWB'6%a%&oֆ/䓓^]$} ]#c6wrwrTviF+4d.yhѺ iL]. don.Yv?OCꚆui\;4WRd@o{euGrCTbRsFkhjrMdѦj_X Ⱦ7ն$*a)6i%B%i'|,6~ؕk[l% "Ajtιe%'E@>b{jp ખO]ttn`si9C:W"sJA c\Tc"K+B(WZYaTxb깬x:ӗdbenMX=OfVCw:.UŌ_c$ao!HzfVI)}Օu1Wڛ˄)> 1%b]awS,z<$pXfg*A6jfAlt9Prd60xXܿS\b}&e4\ue! 6Ns͉~7cj}GqaE_$ |ےZ3n;RWM?l{LH *}0E;eծPm xrP`N 'e T`mL(9c=jIǀ\"+ IABPk4*EOۨb"ΙuN3u 3(sV;Xs`<4Z]msdegL~YBYWݶ1P﹠mPv$;Bم\ŎPvv-HG e* ePpB͟ņ9ܨgYIϔoUF*2ɦ LFiP%[4Kӡ=v" DAFqseKBD{cH*̢Gr^>)4dQCnS^< [FmR'M{e2x H|ЄQ1냵'X[c(X[ƍ8`m"X v=XP)ykʊ#RWD0-)s,ꊨ\v]]*{uU+Ô%"r͓ԕa"*ڌի/X] b')^¥oWC2Ťh[(m֕d?|뒛~J{ <\Yaʭw~FQ5LZ[QJ'SُՆ]^\oyǛVc~n ox۾ CZf~9>Rq ĊC:W̪{:ʖ#REɊʻ;fsޣ[a7~\S4Eg/s1qtsWudYGdLN\(<g>qOVrАq\Sa?sSwE1\j>,Wuٻ%9n$+mx4:jDFQ"{$՝d%T!dHx;iDгi>Rͅb} M!vpM^cTopsChJsJ'6V:;~a7~F꡴/1w&l!ia.}: 2f{(1ށ%Aj؏c@I^^ÖhH~-6?ۛT-z.d EȆ]}a|D3gIC>QKc1c1'166;gǀoHyZ c{TA1@G'vZ"Sxlsar:TŚZ' CXy@!&]s8[́a-11ipa}vӊ#\ܴ /PyU5tJe $gqL±6$Cb!%lŁ%hJI珁ş R)n vb[M@=dƃ`7 6'0 84؄!_?¸-nqSG Vѻ!q ٕ#݇jGoTzJՎdR·j#$XYHtav(q b0[PPg$8 \"l3@ &={t^۰`Z? F:]> GѶ< =%0A%b-ԭk2h w3h8S W7US]Ҁ?oɎZźcJe;&{kl4"!Nw8X!?'?>g}]Bl TI4Y`V>b-=ɢTE4̧yρ 7Dr}\AÔA~,Zi qr,ChW hǎ@HA D/;PjbPzm)󦚑 c55qVVuV=MB ;ډbq@`n:!PkPY+"|ՃJ}~WoxfI٣mjw$a' 0x8/sV3%|wKF̃ ;qot{ Kp|M+ޏę[.AsOH7l] l 6rDp }jL( hu<5 kiV޵5KFho1fx(/_ӳm6yIE y8 ;K~A؍:zT3Ltt.JtVcNvTR . = >1z( =`oۮ'fCcBĿ|?}Ţ0%>OnFK7M; ᅓ/UPX+cdtQbFr3, >*&M\BXEP{c\ǀt6v18װr+JM@2<֦] C}V(dDSY1vښ vDszd3A٧Mʖ?joܙA10u(5i1§E̛.); ZW(Q3_,V iqV09Q8l=-FuXOQpr !IoAbn…Ӡ|l{.¦5[źҔ"j9+EЬ;~Ir`M[j$tipMlr _P =F9@> ՠl?%~;|8[߼y+c6ɔдphboNj8d7 /ЅOg f>͖ B}$ T@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I?o(9(^' /wJ^@ט)$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I Mij@5%8Z& $p]% $h== $(Ԭ&' MN@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I MiH@$&4 I+Jh= q=Z?nVS׏ݹwXY):%kW =/d-Š!wmӃ}\)7/僼dA;ۃnr$wOp7a%nw/Rhc Kpfw?9@ _޽z}|P:r_k7o_n ]k=M-(~_~o^owrH\ }!|NWz| koQmX^kvxO3d POy82uo ~ ?o{sݴɟ)orlZi=xG#CKd9Kuȥ_;ٳ*6ӊsLe;Iz%>(7}|ژuf'sx9,yOzʇ5dG,GQxAYypͅyh;}9p :FJ]1q6BWӕdkcv{%/CWk]ӕ|蕮Ѹ9_ӆtޚU h9^]eI ᱸ] +2Q\m{+Azta! AMfu%h.(Bt|9,CW 5 ]MؽJ@`+]]]E&Bta 6P/|j7-CW7.CW=Օf*WA^4`KE;/(G~~ze毭7y@+bѭh6~='4#J=xN1>c`lZ;4HS=>">ui+:29|2e,.+sK>-_hW<t:&w+_Vt%pm\-JPtute;+2t^Dy((]]!]99+2e^\yh;] J|q)QWi3(h͠wut%δ]\+*t%h9"VB rY8ۼ ] \읮eԭk Wu̠ʏwNW񔮮RbLBt%:[i7vt%(]TBʞg8&^! 
$Jd|zט\y7\Ga zŊwS Q{)uVwYΚY`OgvY lx=p4]7]8w@w:ىt]%N-DW2t%pYLґ5ҕAyҕF ] \V+A (JWWHW.gUp\`2t%p2t%h P:rJWWHWLշ] +*t%h;]e"RB x8e pUJӕt"WjqVt%p2J] ʠ*V; 흦i;ٻFn,W~YL|]> ; `iLSƍmeɑdw;,=\%YKVU:hǭ*^^-I%+anI{`xue&ŚtC1SE{ dU53ٖ&z9ϛoyANހ8e|IS{5)Nєvhv!MOsƴl]e0_]e@|ǎn7HW( X p5t*t(5 Xm]!` fBWi<]DT;zt%@0-+pA2ZeNWHW YOWrpi ]e NW-ҕ I*8lhuCR+jGpEkBԤ EҊ$-+h{*-tJtBt[+#P XAd J/8= t֭AV7n,<O*{vYM;e_T+{DY6$Up=/ZCedNjK)i/r/\K"ɂs>ȣ@ĨF[K^bV֡jb]݇X'|Wwa`P^ҟ^GZ.nF&h`-wO-mdq ̈=X ^,lO+%" ]8 Q!H/=1bΟVYYC/0=ōhm ZX3\(֌V+֌Rw+JwlzzF sj/@OLW= +]юmzEt㭡 WUFT Q6nW+EtkBUFOW]AZ7` f´2ZŚNW ] ÀDjBҶUFu QjکHW`mRW5N2\ٚP{Fi*4]EhҦUCWא8-o(AwtJ~y{*-tтi:]e]"Û+C@h]e-]!\Ct[*卧Rv7IW3̰q!3ʸXM{<7H 8b3̉ys1-jqX >ho:m1]{72ni+NړTė:lۿ-Weބ,~f7.8jHv %j%ڳ/|Z=YzcjQ_^^̝ZD`'P#0$(-VIjgQ K9+@AF16RngyW ߏBip?VNf<C,ǰ閲MjzVnz>O~Zo|Cٛ~w*n~ Aq{['rQ,`Xr)y镃vPрk4`QS%>b",fPv$$X}Q(N{w 'N{n"&|?7?Su腠w5wi'{l!yGTF_;*Vcko&fpKe1JGN(7 "w@mn{9̅ow][ƚw7?_l"75xaqA>ܖ֋˪;i'[^lvv iJFD0LJaqR:E{V:x=ܜnAwvqjvf&fKgf\:] 1~U^&r?ĉ'os?аoG"%)nz ?(@;UR W0VBN u5]]cyY 吊^4'>zgi.2B D xKÑ-OyQÏUwlrcǠ;ա^TR{K3[Ry(Y?e yQ5aK,5 1;,h.vrA`̼6|S t˭hSRTpP|֩|tw8ˣ+e=Bw9!Ǜ*ջ=F+P.vM=5]P˦dr j~s^HM[lvE,e/Xx8r.F)5eY$.F.!Ed'{`TpEm;\*.QGyCr,F*t "7Fd':eJAXBOSp]P=!IrԷ;TZ IoxuLX܎pJ/]LV"sBQ3'DBu#ZH/[OI޼6k79]]zd 1@wua㹪.￯C<[3}5ӒC$LUl!|Ӆb;KWOkF(ITQC0"8^D0 6>bŤqks&AX6/ta6a]8$KN:t5QE* Ib%Uss(o1"'ӈYm Q8̹ MuYg~K6mNDg$}_o~v߆"g__ꮼ+KˢwJ")"Pk@$ .QHCs' ir lܿhB1U AЙFL,q$O'=qt[dgxwWi0r·+MmW3>(Qj;0!&TJPznl $IMQQ'¿LI_Ssy^X֐߿tבGGЬg<<Ĥ .1K&ԟL3m֠3Y 1 @6&D$pD1A:y$)3q:S6HgF̨vZ s8(L\2LaG+\"}7&$ZuI@QiHVSMcrF(S'ZZaFb<լ3kˎ%/7I]f!*ϳvzy{B{Ḑ~m*TEv*`TBtzDz2u)GT֜S֜ShM?<НS)(Y9Ϊlb^kZ]u/ X\Qs'Z]𹏃V*' Iil M^X BZP޵$B;wu 0rX#gӤ|T)Q#FzGU6FVJ.A@be8-.Yv+" sTʻKpHeG2Z$Rc1]/_uo55rvqp9)dG yJ2KG K`y)@{"pȐ]&8fwmFbބ XX)$aKM@(maqƂfؤPPE&:BXYrօms _LXĬN`Y'>8ʃX#9;9 t\B9ą Jn># Ĕ eFXؑ.[LW -h|b ^< ieK}>))<%0:jBAZ#C6($N:[=YXf1UqRr9MsNz 9{~*)XB0ϣX˜@98s9 BQ{&h#8ndBCC b+׏"l%NzyDI@H-UK2FnFźy5&XR KɡDM2|7Y!۞ .wF{kM[{C%˛O%`RyZt.K1# %p RV҃QX"(JFCܖR6e%.RvA”l&C<9׊mK#ؠ5^vƩZd~pAQ *p;$N͓4`0??#Kl}P%2fILNN &,% iWbK+i(ʞOElJѨ`TduY(rH7n;[_[a4L+*HvǡR[,U';m3/q9'H !p!K^BrAa4^!Y1XUQtEHzHȢ!H2.NEey9aԗqdE"?e;Im;РtȤ1pzLR| H{11G¶kt3f@nZI.( )D`A4i@z*E.:\T3:&[gkPhZ\sI!EB{>݅\H BIމ@)Ň"[ChYaD"چ_&SX;'S-n(j'=:'~ɵz OGв2J1iY_BJ|.\ n$5 %W1QKY`D"!/))l`" g0t ]r-1Qp*@r0:aYzt)T搣*R1&dҹ7AAe &E';S*+T?s,-멕\&lQ`*[ttJ%H]8֎cRm8&}zrf˫ Vert̃6 B =-)TrQ\j&NԤA$2 gQ0&%峖B'54iy ]J:[ A.K1Rtaڐ (C՜$R x Du4:( i=gvâz~LWlu -'R)U@r?LkV_ r5h)aTiL biK͐J#D_6 5__߻^2)Fğ;׽a7ZB `s? GWWh֎tJ4 yP&zgsTVXpZ,nF۳wge_X|&z~{>堚 wY|m~\\Nj8K Я KZ#t ]%J+ h``)/iC/ oVݭk4w$!y,DO߭[)lz})kRvbL/t15*{.vY-BAN ]nOTllb Km4:I%\G?%i/4&)GOK]ӰhKASf#E(lq;Qf\՝6)ɹŠbruL0YG5<;[+]$8(cWV통 %$=NIU 6$Y8Jn'^ MDfIce 9{FI1rDHէq󌈛î[L6!6Ck}6T5mfgc{߇n5%X6ĖQ1Xr$%5^̌A0RL+j%"on:3HJqΓ%t$.^qQγK`Get9^i  S"{}-[Z#g5dxt\yb7}ޠ?W{*Z[k5F㏏fNć\05I02ҾScbV9 Bk euǼ0LFf> 6aAK6PjYmgy"s NI}>)BiGgQZeRQUAnAq{l CKhPA99ٿE]OUzѻGl;^ D~9j*JҖ,710qUȕ'# UMe'҂)FWd<!:}J ɕ3WF +"Xsv2⪐B-*T9+N=!qEnNF\r*ԪWJ zJ!)ډ`ɈB0" LRg)ĕ+5ɈB. "j%cWJљڟRt.~bATjx3լYoKь?9fJw}q +ͯoU~q} E)4oOuhS\ G5!o|mT>Nzd@KCv2˾wߺa PChnnx[6f5t5Zg͡@DWGHW_]$]]Hd]] aeRt5h+o?U@38] v5 Z> ȉ"ykۍ3ŞS ֩3-?ؿ۬mx경_z.V6A2s/,qKs U' \iy?#w֥7Y.&nN\.첶Qi1OG ?f;gQTw]yNĪc38b4V0Yc7"Nob}3NIIǬSoyU`^mWx-Nn?O>PiC/GFo̓+_i|JEW] ':Br>[u=j `j':F"`ՀWo}\j<-Ot]芕̼p3w5z=t(D+Xjj QW@򉮎|0r[,a5t5 ohOtutl\qZOl[ -?=ir1+ G7 W 4 ȡ&B|`^9&k׸y(\AWr=L]\ ZpIBWȡ@DWGHW.zw[L{5n&ܸh=t(ɜk+{,EW+BW@-:] zRWHWyMsWLq=fpYV3wb¡@DWGHWް"`x5t*o~1OtutF,d8p#63&:F &F&uqaja-tz>]%󉮎u&[;.@zt5PiQKx/q2gL>^2A>5Ly#4 BOa-FVNxydWsݬ}xh)zsd=dY{<5nh44;ES~''&Uѕ>rtݾ䝅?|≮~h]x}g SV`Gkyqb0ѝ M鶼Ct^#MqPwYwP+ӫ5]/./JM}hDynk1wWhu^]#XvfvW6mw 96,g\1E !pB}擔%>Mǽ>vG+d͎Zs~C(y{LUݽATTT (1O薉<<!<-j'g]׬_@0WhC|tߥ? 
31@nڦ~*o7K?ܼJNy-9=D$f}Vϑ=l&oCD_Ows?quu~ \p9yד-D}B ܬ&$}[ΰx`f.6425fB Iՠ9q)J% V]M<1v6LuGhX~ZhXNں1.XU,o/ɥnl[-hsbtDqM D.UZ027Ќ[զS%{OExn ׯ߽EJ͎v14@BJJrf95czI!SmA^fh.G z-.KNxkπ𕈶f7nW:d[M1;clVvX*]IIJ!vhra\r6玡dcCȥaUB5zK1ylDZQG4"D~"iN׿۷?7 XW^Uü 99s,jpš9 *VM˝UĂLC))Vރz99cKPy:r_aI>,JG[lj)(%GYGw( }2FXKqeIVCIQQ;S!p7\"FSy,zKBP`S5[ЇL7MQNZc1f"Sk ]5;hYX𮵡l;v4$E-HHS` \,kFm1xF,WmUo|KSVAGCh(QN[<ĪJ|Xɠ-.dKgǚȭn("uQ6 ]Ȇ&L6vʮWgnpJ.c$X#lzTZ{f`@UhW% Z;wMBAqV*J'WX]b $؎~okҥpuT`IJdb!}Mh(pu OưKYBpPk@o&)Ci:|W2T7RK$XK2вZPB]"w|Q!ՠB+~1l a@0vI!@YPђD;[߹{f)QC(e֜ A'"g#~C7!wL{6)XKI!0΄W .L)lL/*A l*31P(q 92j$X=FAQQ`63 :XՄI"D5ڽ()xzfJi,yTu!!ѿ9y%`ˤD,%&TYr>"2Dbՠ BSE؄Z}FФi!JJ1Ewv׋qicƬsDq"h(pljʐvH'ka %}LG<خ3¦\mכ1Io/bn TA'_ Bl}k6=D2 o byPT8xiDe:Y(rVH|!*#d`1@&52*+2%qkQ4XVѲ<0{KH`I}%H`Q2:@vTm /} ,\.d!T?3 "by'S¶MRFNEOuk6~}uy$di&\l詌.#+tme,!zԥՔLB^,s` QƞsR:p=S_I0Saʠv]JH-QSeL%3cEX WR()Z3X-DYT3JBX-{Z=0`Ș#͘c';AC,J355ŌKL pyP*ZCNao<{qUgÅEeXv cF86lA ډ+q5ͽ n_k =lC$pL΂Jvrܭ[o4^pJ},edԿEn$4>F\̌CR[QZ`@?zyrsiߤ6՛\?IK%0(CIG[A'CEEH}iw&dJY:Z- ݚZS`ΓEI5AYYeBNı2#ba!)a!/Q[+9P.OڪR LҢI*WR;T*cX`aHȮČ*- =>1r `o٪7̰ 6E n+Ck)-'7 cwE'yDyŰax); [T1"@5H,#1dA. l5?<6&RM 19I9hcROTRw kѪU5g(kISAL@HFB>d@הdQ痝 vSix*[3Wqd-pڠ@4XAqu< ; Zf'=per㾐?Д n F.ӡרzRGCXe(6tqHݎ:h Ni*!_4fSM*wN8 "ˡ꘍f jr'E`4lSUey"UYЂQ\<߾}t >|Hv47)vqAEvpaJt<қgx),TFxUP!%@2~x:Ngi2}L[0OJ*akJ%pL2MBUo^C7}%ByfK7@뼎f:q]dye(aPL|h__ %%P 4WQqOg@%P tJ D%*P J T@B%*P J T@B%*P J T@B%*P J T@B%*P J T@B%*P J Tp@)%/vG $ '3J Qh^ oCJR)>@B%*P J T@B%*P J T@B%*P J T@B%*P J T@B%*P J T@B%*P JW TKJ lIw@nwWJP tJ mGl@B%*P J T@B%*P J T@B%*P J T@B%*P J T@B%*P J T@B%*P *LIKJ Dw@W(LY{%aq*H dP@B%*P J T@B%*P J T@B%*P J T@B%*P J T@B%*P J T@B%*P JQ}Z<_eShf>Jiu^o@CJq:hb=uHTKRxWK.\*(F!T= !*Mw+th9NWecE: a;DW,+thYPR+83tUuUAI%ҕd%~CtU[L֙UA)C+U.]`MUgeq ZUA) ҕD2!*Uw;`AkվTC+Ù`ug5ف{?(C"nCt7Wt ZF!]"] %u#~4.ox%v{є"n6iD(]V1#/yÛWx64BirȊ%vc2{ɛ&1 댒nk%zch {*7AG_D] ,l2*:QmƍO^踔{z= WW9C^6E d *||# *0+a56R1i#˾.ܭN ƾVzIv6h /d7lRݠ4 V=Ƴlln\=9Ȫ7儤=x !/%x-{P= 2[ЕAڵ꩒/~VpK[W[;]"] ]1e+l ]Bv ڗچi;C+6Ct_^\g}UAiҕT8\I{n2-nրɥœq݋_ Ogg(,!e%ȭ"u,w"ǐѓ&s:z޿?Gs'/]/v@?IʺΓE,[J|*(St:@3x1_7ѧal+dG|UsY nbYKdq%Ü ,K'S:$2d^WeZnwG- CdtݝW/,_U]ٮVoo|:קOg> Y/}{U~SI`yΠ9ex~Mǚ?"]t_x;p7=R'} pw~s6Cߏdnk~ʏzl^ܺ#UϘŨZ͡چzzwwRڃ|=ֵU˙y_' hp?.u1 n.OἺǤvP.1z1"|4D omTD٨h uLyWiguT`n:%2NӨ7Je )z߆BYhӃU?/ըYk`6翑k4]vѯŵ[=\yxz y6 X>9^-m6dzrDj+i `ROOnGQ!9xjҝ*x * o_Fy&r `^6o:E+4mۧUէZJ[Hv`s7wb~xW9^Y\ |f+,P%QL{ |?9LGV ^?j|1{Mɴ$4N]ɝWIlyw c g4] 9s j;.C̃@u|Z_`iQk% Vc"V v[%5)pZ)CU<&Q}:g*EKZyhkck}~3 McҭwdKL[_?~uwbkv{!SO3'o֝ )K]矴hQ\;e<-D#-dl1bM-xBRKUxSdSeNFn艮DM>|7:dGm j :76I{ʸqm `$9:i\<*f H* +BHP 줠p ,#L$\|vǗi2Tq_+#ʖQ"#"#uR1-+<8)T\c%uV+@׀3vZC QZof)Z\xGjLI3T2#aF:MPxqZ-:>uJvE2/*E{kM^D),u 3lB@mI +Y#/>/ :vC2(l>t1N,.xs'F&5FV@rEpC<~DkP1Y[яˮQQ>5QUQU-̸ p=B99CɞYCBۭ3[SxRP*HBDO>1qܩdJaQY-grBU(4LjM]Jj$r,-3sTLLZ -mydM("r;  (A?m6ӛքSe+ r+Ew*ǸT@Q!&­͙8奋ڋf:'#;" tNZdS>H C1|BWwg؆&n6xh*W@*Js%({ᑢ,PZTX:Xҋ~+ZunZ޵@MY8MߗF Ӽ$G{=wt}cI!ha'kMC^ž.|'_^"_^47@u2̺|./-;6 /2>g ۥuE0p w̧>Ds;*dSo,̧&>,uNü]7Ed~-ERNo%0I$ʄg-WAM*vPw\(({(ǃGH\׮Qaǘ4Zl!FVEla?.?<^ %V]?I7>_b,ҺgbGvpݭm^)7hųog C<-d6 ^DD>"EĆQ⨰A8akTԠA+⤤6Nc;?,psϗs4yxCb1H6""H$Hd>Y.bȝH5qJtw 4ng>֞ӝ >y;g:{o"Ûhmɫh꜈ND^*Z 5 Bk{n/6jL 3fQr+21hmnE].:>\jrȥf {¹v*DN! 
V2ey4JL*LUaKAcT9bd" be}08nG `<)PnCFNChb (Ɠ3Rej?I$fe&M3ߍ󹡪hSylzxG,PkQ!Ð@AA'."!%yE <NcFJڀEQAs*6`#*UGng}|<f8bsJ;R6EJ^i,9L'>WslRõQ͑Xp%Z!T3C^.xyƟVQmmfͮYvqzO}NGyF>ycN.W)|r:_Qjb| JE92̔D麟Z:܃cBSie:Eq?whi)Poov~1&S) d%K[2q{@w!w)'umfE)(3g?}&ӑ-J`ْkonOU~V^;+ ]/ɹ1P56hu{I,Dn%m:N/p[N~T{}b*7vhȠuYz{0NmvK;^5v>yxwe=/ܾv3bM|L+YvZOҨ8ͷf5iSBl13Zx6H `EWb˧6* f `hf)1H-hygZr9AV4Sf,=^|-x$ rKVck U cJ#P ߜ <Rs@C>cdOyzcL;㹛jgqWv;@7%{a_ԥĿy^o0ɾ#n[bmkq8%7 @Kf0??h9D.x`!Kؤ]6 .0c2$X/J,Dm }bJyR~vYeugH5TMfRy0#QlpI< E@`8AS'~TopL-p `k4_OhYXr$d$Rι 4DNZa_Z8JHP\n,X稫 Usb=+g- ;vCv6gʺߴ3۝Vu\umttW ~֦:Kٹ?ZT1QZB3G͘pRHe3T14iPw'Tx}fV)//Pk]LnUm'87 I-RM,’;}qZ,WSZlki e & ¼&BP5; UTsc%bDyxL_Gl߂NGDϤr8̀/r@YzK8Y!g۽,?>!+|ξA 'IlMWq{}-&y>-`<<[81ˆ*~>ޯG.ۼ{j;|gv;N`C-o:Eրa"uvc"5F\T) *q0y2l^@YT6Je!٠LfL )&0;mv` fTl! *2a2hT1rQRT*,԰MWis<19y>į#ZeW ?`6vkd+s)s'R8/eF:i2MŔr/:PjNv9u ϸtYɋxzQs>.v{2 #Sa S* DƈR:?C9ac6cзʦdzYO>͏t:#~>ө$Lkj7XhFD("V)8oy\j@.Z N"D"~' N0O0XKKb3JƱh.@ sgVƠN?]Suڧ so{k޻6C?(\ORYQK#_zz"5i<@aGa+wѠfe 2xԼ`=ɿ|{Ҩ{oI6߮RP/o^ l:6n};'LOIO~f/M^/F2>d5u/UFT k?a3imhi"[uoU-Ǒ5c6GPPw,QScso>#k}L6(HҨz՗+Om%*}v;<< L]R ]vQuo-PE&=3!tւ7 x:?~7E)ɩym6D#4Rp!"1oSN"t*үVuѻbFˡ"nyٚPnzEʷ]ؓL m?fDQE AVdGzn"hM%3Mh̢΋5Rcs~>5Y>н5#=X SsN KUTb&UXPcAH`N`+.d>HXGx# I8F:QFYJy>%sKZ0o.MHCG8p/*՘߫ǕyξYyW+!e׍rќE@6b#ҁӨL bIpb!bnavFqvZc:>zT!$U4(W ZJ TW8> ],,p:!zODx-E' P!R Q`d` I:1!4d{8OgꯜiB$ѩd*3c 18yGqaXF; K~?Y~K="8 lDqh9nMgq[3aD90OϒyXT 8-2R-lf%Y̏ߙ;3?Z~gs 5|(&4h09 ^1C$Dd(00j<۔WVG&1rbGb?D 8_cwɬKsr6)C:}mpiD{ΛP j"_`??ݴu@ˠ 3fm,̗#+}䒑`am{Yc܇K cdHʶKҐ#i(cpz{U]Gb>ͻL<{3޻tnXDG)4hGXh> WHR9%FaQ`'($t&_H6 MTX֐O]87Uࣇ1u1ąO|6&tɏLQְ,XZ $qpsg=™b/pfV:MAV$:06j"S{3c)1@6ō8`; :ʓ.I!% 3Bnc9m2fOR< ].s2ؽǛ|JBRyPU\~r=TPu,Qx*WL݄i,ݻʺÓ; *XOH(Y ǾTbnݬ=yh Q Dr8S århÙƩ4eҘ( Ȱ\njmE !IerƧzTj 8Q!(8v)&8 YѸ3rNwq5M]nOjgIy B^ ZR`&g, eQG.D&O>ЦE$ G'T `h@J%q M|e\6!Թ[|*:q`S'dG(o"XV/*WV,A7P80k^'DdW ,X."8R=yD#^ݯUPh+7zs3Q 0Yߦ|[MP87I^%ӮsCm^8:l{ɫ(fUO.GGLW엻"ѷ)y$<\}RhcRLCa|)e99 Dx| #[ˎN+K׵a ~[`!re#CqD_Ik: j֍./G\[+ D1+P6Yw7kNj4j⋟߯BrZT`{Y"PUZBKGR(4ŖRm\y/k?ހls6f+˫ '~wKkVcV$AP߂&rtrtG53$Lj c$RVA~\U]Zl,g|SkSEGT(J6f[yTSדARYI\(E 0W _!S7嫯>ZP.0kiC%/*ɱg?vlvպⴚ6ٜZqtSB)y@P %Z TrԇՀZC5RjCi&)[nݪsM}:<;-Y1e_5>HIX).hV5qwxIt)T bϛ żEyS3vqau0Mx5Luy C1v0*6 6R3.Ta_[$W <盪6,B!(U; S(D0g nHJ!QD^ n-?>FjJ]\ݏ`EY\݇\ γQ˞I\ݏJK\{+݋~zU"W\IE\%j5wqAط ^\="Z3H\%5:qrt0*QKоD% Ck~@ L*+ءD{t=,bbtr4F16iږ0|4ZMo.go#TؔH!R2JyizO@UCGD/ύF<)heNs&$Q^b9“o|#30aT I6:(+lñ!8&R"}fD 2k'wZ|v$tՖ RbJ叫=`} F'+#FYFh9sY%:ǔ` XS E*Ou"B LArV4Dd%0l\ש;#g{%dob?|_9N@N\V_ ƗOmJ}]evMM;#4HS/Sad5J.:ع q/$w=|r_m%ҹOUf Vm)Y3r%kxntݻNMA1d|;s@r-# IDԒ)E 9yAJPm6ZE`v^\ QVoj/pi.J:hnځ,1aZ[,`k U cJ#P 9Hys怆| *ۈɚQSqݵʻ*.Q?h@%aX̠]:?!=.awof4iշyn80lhQ| #S54o.0pm*ebņZICS}-Pj]RR=R94V2o^'\Of)yR Kf+ R.(S}齾ޞ3F'kǩKTR<`%SJtIeɥ֞N rH# be}RSFDD b &:U T$Mr2eW'/4'x2x_q{y%4kZ\Ϋ6+'-PΨn$PkQ!Ð& Q&i7A@'nk#(bF1ZleF͝QFG J^TBFJBNPL2]'|'z|<f)W_9sCZ"^|&RsRt#c56_jUgJKc.;ŝ2(:oY*錵30P潖豉 iwvFN늍ATȢ&Qn@S@gƌ[ltN5G% D{{;$W^A/n8?^e[x)=~5L'n~@%Gy'I:rx$~MoaAhK8rBɰ(G/iBy4/fhTX[_Lt `' {k;z!JCrï'pnf_YJ!$HW]eH21Sz9nQ#eF\5L=d2A3U$Pj6쬺<٦%ឭ*v[שs/Nw\\+g|P"b8(a\c|Y_;YFӹ npZ{ޢži{=$WWhܒ j 7.i5IĹG96Iuv?XǸ%O˪a9-.lK\ BZnm؜=ܼ)&y*,zꖎkK gGzC5֜O޴m՝ԙ+<%nŴuaR b-mci=q9`BLJzV}YOTJއ!=Jhw^ _?\YdڕͺpߧX,Qd eJ+gI1 0 S,00dG4V(\*NaN;fr;eH/įjV͹jcVYg_Zޡyҷ}uNt!k[^{&ef!b D.bL;\R'M"S)RC;r`{D<"r?Jػ ܕ!Aa}AGDLy6"XU$"+0^? 
14850ms (14:19:20.931)
Jan 22 14:19:20 crc kubenswrapper[4948]: Trace[1555698662]: [14.850503953s] [14.850503953s] END
Jan 22 14:19:20 crc kubenswrapper[4948]: I0122 14:19:20.931364 4948 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Jan 22 14:19:20 crc kubenswrapper[4948]: I0122 14:19:20.932110 4948 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Jan 22 14:19:20 crc kubenswrapper[4948]: I0122 14:19:20.933076 4948 trace.go:236] Trace[674442428]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (22-Jan-2026 14:19:05.966) (total time: 14966ms):
Jan 22 14:19:20 crc kubenswrapper[4948]: Trace[674442428]: ---"Objects listed" error: 14966ms (14:19:20.932)
Jan 22 14:19:20 crc kubenswrapper[4948]: Trace[674442428]: [14.966355891s] [14.966355891s] END
Jan 22 14:19:20 crc kubenswrapper[4948]: I0122 14:19:20.933112 4948 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Jan 22 14:19:20 crc kubenswrapper[4948]: I0122 14:19:20.939530 4948 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146
Jan 22 14:19:20 crc kubenswrapper[4948]: I0122 14:19:20.982104 4948 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:40954->192.168.126.11:17697: read: connection reset by peer" start-of-body=
Jan 22 14:19:20 crc kubenswrapper[4948]: I0122 14:19:20.982184 4948 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:40954->192.168.126.11:17697: read: connection reset by peer"
Jan 22 14:19:20 crc kubenswrapper[4948]: I0122 14:19:20.983306 4948 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:40948->192.168.126.11:17697: read: connection reset by peer" start-of-body=
Jan 22 14:19:20 crc kubenswrapper[4948]: I0122 14:19:20.983611 4948 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:40948->192.168.126.11:17697: read: connection reset by peer"
Jan 22 14:19:20 crc kubenswrapper[4948]: I0122 14:19:20.984279 4948 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:40966->192.168.126.11:17697: read: connection reset by peer" start-of-body=
Jan 22 14:19:20 crc kubenswrapper[4948]: I0122 14:19:20.984448 4948 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:40966->192.168.126.11:17697: read: connection reset by peer"
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.078132 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.083412 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.092742 4948 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log"
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.095071 4948 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d" exitCode=255
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.095120 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d"}
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.186559 4948 scope.go:117] "RemoveContainer" containerID="88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d"
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.346504 4948 csr.go:261] certificate signing request csr-crrjx is approved, waiting to be issued
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.352628 4948 csr.go:257] certificate signing request csr-crrjx is issued
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.421683 4948 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.805347 4948 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials"
Jan 22 14:19:21 crc kubenswrapper[4948]: W0122 14:19:21.805619 4948 reflector.go:484] k8s.io/client-go/informers/factory.go:160: watch of *v1.Service ended with: very short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received
Jan 22 14:19:21 crc kubenswrapper[4948]: W0122 14:19:21.805719 4948 reflector.go:484] k8s.io/client-go/informers/factory.go:160: watch of *v1.RuntimeClass ended with: very short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received
Jan 22 14:19:21 crc kubenswrapper[4948]: W0122 14:19:21.805748 4948 reflector.go:484] k8s.io/client-go/informers/factory.go:160: watch of *v1.Node ended with: very short watch: k8s.io/client-go/informers/factory.go:160: Unexpected watch close - watch lasted less than a second and no items received
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.898910 4948 apiserver.go:52] "Watching apiserver"
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.903218 4948 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.903553 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-ldzzt","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-network-node-identity/network-node-identity-vrzqb","openshift-dns/node-resolver-jwqmh","openshift-network-diagnostics/network-check-source-55646444c4-trplf"]
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.903879 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.903977 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.904284 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.904373 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Jan 22 14:19:21 crc kubenswrapper[4948]: E0122 14:19:21.904421 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.904651 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Jan 22 14:19:21 crc kubenswrapper[4948]: E0122 14:19:21.904701 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.904819 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Jan 22 14:19:21 crc kubenswrapper[4948]: E0122 14:19:21.904861 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.904976 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-jwqmh"
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.905006 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt"
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.910510 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 03:06:52.322076947 +0000 UTC
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.913059 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.914191 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.914510 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.914676 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.914980 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt"
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.915133 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.917329 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.917469 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7"
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.919289 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt"
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.921476 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.921717 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.921835 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls"
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.921536 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.922037 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq"
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.921749 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt"
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.927787 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt"
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.927855 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy"
Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.951808 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted.
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.967627 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.991274 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:21 crc kubenswrapper[4948]: I0122 14:19:21.996552 4948 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.001615 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.011356 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 
22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.020305 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.034425 4948 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297
cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.037584 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.037623 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.037641 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.037656 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.037674 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.037692 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 22 
14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.037710 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.037729 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.037749 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.037768 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.037785 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.037799 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.037910 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.037932 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.037955 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038115 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: 
\"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038139 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038163 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038184 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038208 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038229 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038256 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038278 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038300 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038322 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038345 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" 
(UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038367 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038390 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038411 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038433 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038459 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038479 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038494 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038519 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038535 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038552 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod 
\"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038568 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038584 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038598 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038612 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038627 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038657 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038674 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038691 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038708 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038733 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: 
\"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038750 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038766 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038781 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038801 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038815 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038831 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038846 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038863 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038877 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038891 4948 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038907 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038922 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038936 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038951 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.039055 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.039074 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.037917 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038003 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038703 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038859 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038878 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.038984 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.039050 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.039188 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.039268 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.039337 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.039400 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.039544 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.039671 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.039712 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.039848 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.039988 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.040101 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.040106 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.040258 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.040276 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.040399 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.040436 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.040497 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.040550 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.040619 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.040622 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.040619 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.040642 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.040716 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.040736 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.040758 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.040771 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.040951 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.040983 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.041031 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.041114 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.041152 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.041321 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.041420 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.041553 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.041612 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.041616 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.041629 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.041662 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.041685 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.041701 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.041717 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.041734 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.041750 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.041767 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.041786 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.041802 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.041811 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: 
"1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.041819 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.041899 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.041924 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.041942 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.041962 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.041981 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.041998 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042029 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042051 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042072 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: 
\"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042094 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042113 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042133 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042152 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042172 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042205 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042226 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042246 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042266 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042284 4948 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042305 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042325 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042344 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042364 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042383 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042422 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042444 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042467 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042486 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042507 4948 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042528 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042551 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042571 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042585 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042602 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042618 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042638 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042660 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042685 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042708 4948 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042731 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042753 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042775 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042792 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042807 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042831 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042855 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042881 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042900 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: 
\"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042917 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042931 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042946 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042960 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042975 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.042991 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043006 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043042 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043057 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043073 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod 
\"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043097 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043112 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043129 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043132 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043145 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043233 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043251 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043271 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043287 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043303 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: 
\"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043319 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043339 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043344 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043355 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043378 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043377 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043394 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 22 14:19:22 crc kubenswrapper[4948]: E0122 14:19:22.043422 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:19:22.543403633 +0000 UTC m=+20.873076562 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043446 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043467 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043485 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043502 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043517 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043534 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043549 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043564 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043777 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod 
\"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043814 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043845 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043866 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043890 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043913 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043936 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.044416 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.044435 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.044452 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.044471 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: 
\"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.044488 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.044505 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.044520 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.044536 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.044552 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.044569 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.044591 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.044608 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.044630 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.044646 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.044662 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.044678 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.044695 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.044715 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.044739 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.044755 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.044772 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.044788 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.044809 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.044825 4948 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.044840 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.044856 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.044917 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.044997 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045029 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045047 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045062 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045102 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045125 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045141 
4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045158 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045201 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045227 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045245 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045264 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045280 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045298 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2dzw\" (UniqueName: \"kubernetes.io/projected/8a86f840-13e5-4b83-a9ab-018c2fa70906-kube-api-access-n2dzw\") pod \"node-resolver-jwqmh\" (UID: \"8a86f840-13e5-4b83-a9ab-018c2fa70906\") " pod="openshift-dns/node-resolver-jwqmh" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045316 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " 
pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045337 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045357 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045375 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qn9n\" (UniqueName: \"kubernetes.io/projected/355ee4ab-8609-4b0c-ad77-bff335632d2d-kube-api-access-6qn9n\") pod \"machine-config-daemon-ldzzt\" (UID: \"355ee4ab-8609-4b0c-ad77-bff335632d2d\") " pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045394 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045451 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045470 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/355ee4ab-8609-4b0c-ad77-bff335632d2d-proxy-tls\") pod \"machine-config-daemon-ldzzt\" (UID: \"355ee4ab-8609-4b0c-ad77-bff335632d2d\") " pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045490 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045506 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/355ee4ab-8609-4b0c-ad77-bff335632d2d-rootfs\") pod \"machine-config-daemon-ldzzt\" (UID: \"355ee4ab-8609-4b0c-ad77-bff335632d2d\") " pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045521 4948 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/8a86f840-13e5-4b83-a9ab-018c2fa70906-hosts-file\") pod \"node-resolver-jwqmh\" (UID: \"8a86f840-13e5-4b83-a9ab-018c2fa70906\") " pod="openshift-dns/node-resolver-jwqmh" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045539 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045556 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045573 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045590 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/355ee4ab-8609-4b0c-ad77-bff335632d2d-mcd-auth-proxy-config\") pod \"machine-config-daemon-ldzzt\" (UID: \"355ee4ab-8609-4b0c-ad77-bff335632d2d\") " pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045638 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045649 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045659 4948 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045669 4948 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045678 4948 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045687 4948 reconciler_common.go:293] "Volume detached 
for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045697 4948 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045709 4948 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045722 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045735 4948 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045745 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045754 4948 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045763 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045772 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045781 4948 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045791 4948 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045800 4948 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045809 4948 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045818 4948 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" 
(UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045826 4948 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045835 4948 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045877 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045887 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045897 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045907 4948 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045917 4948 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045925 4948 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045936 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045945 4948 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045955 4948 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045964 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.046062 4948 reconciler_common.go:293] "Volume detached for volume 
\"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.046073 4948 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.046083 4948 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.046092 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.046103 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.046113 4948 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.046122 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.046132 4948 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.046141 4948 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.046151 4948 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.046160 4948 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.046170 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.046179 4948 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.046188 4948 reconciler_common.go:293] 
"Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.046197 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043442 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043641 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.043652 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.044566 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.044645 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.044874 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.045884 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.046576 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.046775 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.047348 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.047644 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.047652 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.047824 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.048077 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.048314 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.048483 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.048542 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.048544 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.048720 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.049713 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.049975 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.050059 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.050734 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.051170 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.051395 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.051436 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.051677 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.051830 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.052292 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.053325 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.053609 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.054492 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.055096 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.055305 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.055410 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.055846 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.058238 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.058388 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.058637 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.058965 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.059337 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.059401 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.060240 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.060557 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.060697 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.060763 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.061032 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.061700 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.061813 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.061986 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.060109 4948 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.062268 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.062445 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.063880 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.063901 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.063906 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.063963 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.064154 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.064239 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.064358 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.064516 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.065677 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.065835 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.065906 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.066385 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.066609 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.067230 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 14:19:22 crc kubenswrapper[4948]: E0122 14:19:22.067646 4948 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:19:22 crc kubenswrapper[4948]: E0122 14:19:22.067698 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 14:19:22.567680214 +0000 UTC m=+20.897353143 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:19:22 crc kubenswrapper[4948]: E0122 14:19:22.068478 4948 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:19:22 crc kubenswrapper[4948]: E0122 14:19:22.068539 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 14:19:22.568524537 +0000 UTC m=+20.898197466 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.078204 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.078474 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.078660 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.078875 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.079480 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.079735 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.079948 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.073286 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.080470 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.080639 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.080732 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.080959 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.082541 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). 
InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.082723 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.082744 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.082792 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.084105 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.084243 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.084396 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.084507 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.084962 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.085004 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.085542 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.086840 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: E0122 14:19:22.087097 4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:19:22 crc kubenswrapper[4948]: E0122 14:19:22.087122 4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 14:19:22 crc kubenswrapper[4948]: E0122 14:19:22.087137 4948 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:19:22 crc kubenswrapper[4948]: E0122 14:19:22.087217 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 14:19:22.587193223 +0000 UTC m=+20.916866152 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.088184 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.088554 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.089500 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.090147 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.090242 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: E0122 14:19:22.090424 4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:19:22 crc kubenswrapper[4948]: E0122 14:19:22.090459 4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 14:19:22 crc kubenswrapper[4948]: E0122 14:19:22.090478 4948 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.090537 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: E0122 14:19:22.090555 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 14:19:22.590529795 +0000 UTC m=+20.920202724 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.090572 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.090785 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.090810 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.090946 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.091444 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.092320 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.092413 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.092581 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.093035 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.093744 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.096279 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.099650 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.100189 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.100874 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.104865 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.105613 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.105986 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.106337 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.106523 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.107088 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.107628 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.107683 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.110526 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.111495 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.111974 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.114951 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.115151 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.115236 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.115309 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.116054 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.116606 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.117530 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.119680 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.121683 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.123116 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.123245 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.123452 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.124528 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.124683 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.125377 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.125478 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.125553 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). 
InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.125618 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.125644 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.125962 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.128162 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.128374 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.128866 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.129946 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.131111 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). 
InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.131250 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.131495 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.132398 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.132903 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.133289 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.135194 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.136078 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.141025 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.143386 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.143403 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.148186 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2dzw\" (UniqueName: \"kubernetes.io/projected/8a86f840-13e5-4b83-a9ab-018c2fa70906-kube-api-access-n2dzw\") pod \"node-resolver-jwqmh\" (UID: \"8a86f840-13e5-4b83-a9ab-018c2fa70906\") " pod="openshift-dns/node-resolver-jwqmh" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.148246 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.148284 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qn9n\" (UniqueName: \"kubernetes.io/projected/355ee4ab-8609-4b0c-ad77-bff335632d2d-kube-api-access-6qn9n\") pod \"machine-config-daemon-ldzzt\" (UID: \"355ee4ab-8609-4b0c-ad77-bff335632d2d\") " pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.148319 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/355ee4ab-8609-4b0c-ad77-bff335632d2d-proxy-tls\") pod \"machine-config-daemon-ldzzt\" (UID: \"355ee4ab-8609-4b0c-ad77-bff335632d2d\") " pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.148341 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/355ee4ab-8609-4b0c-ad77-bff335632d2d-rootfs\") pod \"machine-config-daemon-ldzzt\" (UID: \"355ee4ab-8609-4b0c-ad77-bff335632d2d\") " pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.148359 4948 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/8a86f840-13e5-4b83-a9ab-018c2fa70906-hosts-file\") pod \"node-resolver-jwqmh\" (UID: \"8a86f840-13e5-4b83-a9ab-018c2fa70906\") " pod="openshift-dns/node-resolver-jwqmh" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.148417 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/355ee4ab-8609-4b0c-ad77-bff335632d2d-mcd-auth-proxy-config\") pod \"machine-config-daemon-ldzzt\" (UID: \"355ee4ab-8609-4b0c-ad77-bff335632d2d\") " pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.148437 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.148535 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.148547 4948 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.148557 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.148566 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.148579 4948 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.148590 4948 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.148600 4948 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.148613 4948 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.148622 4948 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 22 
14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.148632 4948 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.148640 4948 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.148783 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.148856 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/8a86f840-13e5-4b83-a9ab-018c2fa70906-hosts-file\") pod \"node-resolver-jwqmh\" (UID: \"8a86f840-13e5-4b83-a9ab-018c2fa70906\") " pod="openshift-dns/node-resolver-jwqmh" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.148898 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/355ee4ab-8609-4b0c-ad77-bff335632d2d-rootfs\") pod \"machine-config-daemon-ldzzt\" (UID: \"355ee4ab-8609-4b0c-ad77-bff335632d2d\") " pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149265 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149533 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/355ee4ab-8609-4b0c-ad77-bff335632d2d-mcd-auth-proxy-config\") pod \"machine-config-daemon-ldzzt\" (UID: \"355ee4ab-8609-4b0c-ad77-bff335632d2d\") " pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.148655 4948 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149601 4948 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149611 4948 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149620 4948 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc 
kubenswrapper[4948]: I0122 14:19:22.149633 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149643 4948 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149651 4948 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149660 4948 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149672 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149680 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149688 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149699 4948 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149707 4948 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149717 4948 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149739 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149752 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149760 4948 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" 
DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149769 4948 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149777 4948 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149789 4948 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149798 4948 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149806 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149817 4948 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149825 4948 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149833 4948 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149841 4948 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149853 4948 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149861 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149870 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149878 4948 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node 
\"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149889 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149897 4948 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149905 4948 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149913 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149923 4948 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149931 4948 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149940 4948 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149951 4948 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149959 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149967 4948 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149976 4948 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149986 4948 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.149996 4948 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" 
DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150004 4948 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150030 4948 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150046 4948 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150055 4948 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150067 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150079 4948 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150089 4948 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150098 4948 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150105 4948 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150117 4948 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150125 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150133 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150142 4948 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc 
kubenswrapper[4948]: I0122 14:19:22.150153 4948 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150161 4948 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150169 4948 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150177 4948 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150187 4948 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150198 4948 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150206 4948 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150216 4948 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150224 4948 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150233 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150242 4948 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150256 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150264 4948 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150272 4948 reconciler_common.go:293] "Volume detached for 
volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150281 4948 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150314 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150323 4948 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150331 4948 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150339 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150349 4948 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150357 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150365 4948 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150375 4948 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150384 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150392 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150400 4948 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150410 4948 reconciler_common.go:293] "Volume 
detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150418 4948 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150426 4948 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150435 4948 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150446 4948 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150458 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150469 4948 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150481 4948 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150491 4948 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150501 4948 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150511 4948 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150527 4948 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150538 4948 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150547 4948 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150555 4948 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150566 4948 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150575 4948 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150583 4948 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150591 4948 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150601 4948 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150609 4948 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150617 4948 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150627 4948 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150635 4948 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150645 4948 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150654 4948 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150664 4948 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150673 4948 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150681 4948 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150690 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150701 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150709 4948 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150718 4948 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150728 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150736 4948 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150744 4948 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150752 4948 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150762 4948 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150770 4948 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150778 4948 reconciler_common.go:293] "Volume 
detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150786 4948 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150795 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150803 4948 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150812 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150821 4948 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150832 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150840 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150849 4948 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150859 4948 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150866 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150874 4948 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150882 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.150892 4948 reconciler_common.go:293] "Volume detached for volume 
\"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.166204 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.165691 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qn9n\" (UniqueName: \"kubernetes.io/projected/355ee4ab-8609-4b0c-ad77-bff335632d2d-kube-api-access-6qn9n\") pod \"machine-config-daemon-ldzzt\" (UID: \"355ee4ab-8609-4b0c-ad77-bff335632d2d\") " pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.166666 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/355ee4ab-8609-4b0c-ad77-bff335632d2d-proxy-tls\") pod \"machine-config-daemon-ldzzt\" (UID: \"355ee4ab-8609-4b0c-ad77-bff335632d2d\") " pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.167754 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.169291 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.172001 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2dzw\" (UniqueName: \"kubernetes.io/projected/8a86f840-13e5-4b83-a9ab-018c2fa70906-kube-api-access-n2dzw\") pod \"node-resolver-jwqmh\" (UID: \"8a86f840-13e5-4b83-a9ab-018c2fa70906\") " pod="openshift-dns/node-resolver-jwqmh" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.172116 4948 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.174187 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc"} Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.174673 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.179072 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-jwlht"] Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.179408 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.181869 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.182179 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.182210 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 22 14:19:22 crc kubenswrapper[4948]: E0122 14:19:22.182282 4948 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-apiserver-crc\" already exists" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.182317 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.182462 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.182505 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-che
ck-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: E0122 14:19:22.187605 4948 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.188505 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-z6s4b"] Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.189072 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.193470 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.194045 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.217288 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.217477 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.229546 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.241333 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.242974 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: W0122 14:19:22.245203 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podef543e1b_8068_4ea3_b32a_61027b32e95d.slice/crio-010ef3ff6e6a343e94156a587c4d591ccd7638335b446f0c6d95f83ca2b10478 WatchSource:0}: Error finding container 010ef3ff6e6a343e94156a587c4d591ccd7638335b446f0c6d95f83ca2b10478: Status 404 returned error can't find the container with id 010ef3ff6e6a343e94156a587c4d591ccd7638335b446f0c6d95f83ca2b10478 Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.245859 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/node-resolver-jwqmh" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.252341 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.253330 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/45147dae-218a-40b8-8e9e-927e49d38624-os-release\") pod \"multus-additional-cni-plugins-z6s4b\" (UID: \"45147dae-218a-40b8-8e9e-927e49d38624\") " pod="openshift-multus/multus-additional-cni-plugins-z6s4b" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.253356 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-host-run-k8s-cni-cncf-io\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.253412 4948 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/45147dae-218a-40b8-8e9e-927e49d38624-cnibin\") pod \"multus-additional-cni-plugins-z6s4b\" (UID: \"45147dae-218a-40b8-8e9e-927e49d38624\") " pod="openshift-multus/multus-additional-cni-plugins-z6s4b" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.253427 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-system-cni-dir\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.253443 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-host-var-lib-cni-bin\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.253457 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-multus-cni-dir\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.253481 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/feadf51a-0802-4335-b035-fe290bdd3960-multus-daemon-config\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.253494 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/feadf51a-0802-4335-b035-fe290bdd3960-cni-binary-copy\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.253508 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t49ls\" (UniqueName: \"kubernetes.io/projected/feadf51a-0802-4335-b035-fe290bdd3960-kube-api-access-t49ls\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.253521 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/45147dae-218a-40b8-8e9e-927e49d38624-cni-binary-copy\") pod \"multus-additional-cni-plugins-z6s4b\" (UID: \"45147dae-218a-40b8-8e9e-927e49d38624\") " pod="openshift-multus/multus-additional-cni-plugins-z6s4b" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.253535 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-multus-conf-dir\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.253549 4948 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/45147dae-218a-40b8-8e9e-927e49d38624-tuning-conf-dir\") pod \"multus-additional-cni-plugins-z6s4b\" (UID: \"45147dae-218a-40b8-8e9e-927e49d38624\") " pod="openshift-multus/multus-additional-cni-plugins-z6s4b" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.253561 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-host-run-multus-certs\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.253575 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-cnibin\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.253589 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-host-var-lib-kubelet\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.253603 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-host-run-netns\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.253620 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/45147dae-218a-40b8-8e9e-927e49d38624-system-cni-dir\") pod \"multus-additional-cni-plugins-z6s4b\" (UID: \"45147dae-218a-40b8-8e9e-927e49d38624\") " pod="openshift-multus/multus-additional-cni-plugins-z6s4b" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.253636 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-multus-socket-dir-parent\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.253720 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/45147dae-218a-40b8-8e9e-927e49d38624-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-z6s4b\" (UID: \"45147dae-218a-40b8-8e9e-927e49d38624\") " pod="openshift-multus/multus-additional-cni-plugins-z6s4b" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.253764 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.253775 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdhh6\" (UniqueName: \"kubernetes.io/projected/45147dae-218a-40b8-8e9e-927e49d38624-kube-api-access-bdhh6\") pod \"multus-additional-cni-plugins-z6s4b\" (UID: \"45147dae-218a-40b8-8e9e-927e49d38624\") " pod="openshift-multus/multus-additional-cni-plugins-z6s4b" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.253801 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-os-release\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.253821 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-host-var-lib-cni-multus\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.253850 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-hostroot\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.253869 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-etc-kubernetes\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.253908 4948 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.253924 4948 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.262517 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.281260 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.300283 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.319303 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready 
status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.343003 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.353216 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.354575 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/45147dae-218a-40b8-8e9e-927e49d38624-os-release\") pod \"multus-additional-cni-plugins-z6s4b\" (UID: \"45147dae-218a-40b8-8e9e-927e49d38624\") " pod="openshift-multus/multus-additional-cni-plugins-z6s4b" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.354554 4948 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-01-22 14:14:21 +0000 UTC, rotation deadline is 2026-11-22 08:15:02.483024572 +0000 UTC Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.354613 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: 
\"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-host-run-k8s-cni-cncf-io\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.354617 4948 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7289h55m40.128410543s for next certificate rotation Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.354671 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/45147dae-218a-40b8-8e9e-927e49d38624-cnibin\") pod \"multus-additional-cni-plugins-z6s4b\" (UID: \"45147dae-218a-40b8-8e9e-927e49d38624\") " pod="openshift-multus/multus-additional-cni-plugins-z6s4b" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.354696 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-system-cni-dir\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.354715 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-host-var-lib-cni-bin\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.354730 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-multus-cni-dir\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.354743 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/feadf51a-0802-4335-b035-fe290bdd3960-multus-daemon-config\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.354757 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/feadf51a-0802-4335-b035-fe290bdd3960-cni-binary-copy\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.354762 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/45147dae-218a-40b8-8e9e-927e49d38624-cnibin\") pod \"multus-additional-cni-plugins-z6s4b\" (UID: \"45147dae-218a-40b8-8e9e-927e49d38624\") " pod="openshift-multus/multus-additional-cni-plugins-z6s4b" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.354775 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t49ls\" (UniqueName: \"kubernetes.io/projected/feadf51a-0802-4335-b035-fe290bdd3960-kube-api-access-t49ls\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.354814 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/45147dae-218a-40b8-8e9e-927e49d38624-cni-binary-copy\") pod \"multus-additional-cni-plugins-z6s4b\" (UID: \"45147dae-218a-40b8-8e9e-927e49d38624\") " pod="openshift-multus/multus-additional-cni-plugins-z6s4b" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.354833 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-multus-conf-dir\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.354853 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/45147dae-218a-40b8-8e9e-927e49d38624-tuning-conf-dir\") pod \"multus-additional-cni-plugins-z6s4b\" (UID: \"45147dae-218a-40b8-8e9e-927e49d38624\") " pod="openshift-multus/multus-additional-cni-plugins-z6s4b" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.354867 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-host-run-multus-certs\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.354886 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-cnibin\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.354900 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-host-var-lib-kubelet\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.354917 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-host-run-netns\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.354934 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/45147dae-218a-40b8-8e9e-927e49d38624-system-cni-dir\") pod \"multus-additional-cni-plugins-z6s4b\" (UID: \"45147dae-218a-40b8-8e9e-927e49d38624\") " pod="openshift-multus/multus-additional-cni-plugins-z6s4b" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.354949 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-multus-socket-dir-parent\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.354954 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/45147dae-218a-40b8-8e9e-927e49d38624-os-release\") pod 
\"multus-additional-cni-plugins-z6s4b\" (UID: \"45147dae-218a-40b8-8e9e-927e49d38624\") " pod="openshift-multus/multus-additional-cni-plugins-z6s4b" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.354979 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/45147dae-218a-40b8-8e9e-927e49d38624-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-z6s4b\" (UID: \"45147dae-218a-40b8-8e9e-927e49d38624\") " pod="openshift-multus/multus-additional-cni-plugins-z6s4b" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.355057 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdhh6\" (UniqueName: \"kubernetes.io/projected/45147dae-218a-40b8-8e9e-927e49d38624-kube-api-access-bdhh6\") pod \"multus-additional-cni-plugins-z6s4b\" (UID: \"45147dae-218a-40b8-8e9e-927e49d38624\") " pod="openshift-multus/multus-additional-cni-plugins-z6s4b" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.355071 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-system-cni-dir\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.355088 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-os-release\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.355105 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-host-var-lib-cni-bin\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.355116 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-host-var-lib-cni-multus\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.355142 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-hostroot\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.355163 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-etc-kubernetes\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.355269 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-multus-cni-dir\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc 
kubenswrapper[4948]: I0122 14:19:22.355296 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-etc-kubernetes\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.355646 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-os-release\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.355694 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-host-var-lib-cni-multus\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.355700 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/45147dae-218a-40b8-8e9e-927e49d38624-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-z6s4b\" (UID: \"45147dae-218a-40b8-8e9e-927e49d38624\") " pod="openshift-multus/multus-additional-cni-plugins-z6s4b" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.355725 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-hostroot\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.355753 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-host-run-k8s-cni-cncf-io\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.355772 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-cnibin\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.355798 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-host-var-lib-kubelet\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.355828 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-host-run-netns\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.355835 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/feadf51a-0802-4335-b035-fe290bdd3960-multus-daemon-config\") pod \"multus-jwlht\" (UID: 
\"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.355856 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/45147dae-218a-40b8-8e9e-927e49d38624-system-cni-dir\") pod \"multus-additional-cni-plugins-z6s4b\" (UID: \"45147dae-218a-40b8-8e9e-927e49d38624\") " pod="openshift-multus/multus-additional-cni-plugins-z6s4b" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.355901 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-multus-socket-dir-parent\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.356104 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/45147dae-218a-40b8-8e9e-927e49d38624-tuning-conf-dir\") pod \"multus-additional-cni-plugins-z6s4b\" (UID: \"45147dae-218a-40b8-8e9e-927e49d38624\") " pod="openshift-multus/multus-additional-cni-plugins-z6s4b" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.356144 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-multus-conf-dir\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.356170 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/feadf51a-0802-4335-b035-fe290bdd3960-host-run-multus-certs\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.356418 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/45147dae-218a-40b8-8e9e-927e49d38624-cni-binary-copy\") pod \"multus-additional-cni-plugins-z6s4b\" (UID: \"45147dae-218a-40b8-8e9e-927e49d38624\") " pod="openshift-multus/multus-additional-cni-plugins-z6s4b" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.356454 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/feadf51a-0802-4335-b035-fe290bdd3960-cni-binary-copy\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.372824 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.378513 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t49ls\" (UniqueName: \"kubernetes.io/projected/feadf51a-0802-4335-b035-fe290bdd3960-kube-api-access-t49ls\") pod \"multus-jwlht\" (UID: \"feadf51a-0802-4335-b035-fe290bdd3960\") " pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.381876 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdhh6\" (UniqueName: \"kubernetes.io/projected/45147dae-218a-40b8-8e9e-927e49d38624-kube-api-access-bdhh6\") pod \"multus-additional-cni-plugins-z6s4b\" (UID: \"45147dae-218a-40b8-8e9e-927e49d38624\") " pod="openshift-multus/multus-additional-cni-plugins-z6s4b" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.397586 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.418186 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.433592 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.445318 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.460289 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready 
status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.473606 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.488415 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.497633 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-jwlht" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.502722 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.514226 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: W0122 14:19:22.515547 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfeadf51a_0802_4335_b035_fe290bdd3960.slice/crio-d49494811ba5a23a45f516062b401b30cc31a51ac2c89fd68b26cb5ac88647c3 WatchSource:0}: Error finding container d49494811ba5a23a45f516062b401b30cc31a51ac2c89fd68b26cb5ac88647c3: Status 404 returned error can't find the container with id d49494811ba5a23a45f516062b401b30cc31a51ac2c89fd68b26cb5ac88647c3 Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.517860 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.527416 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.542006 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.556403 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:19:22 crc kubenswrapper[4948]: E0122 14:19:22.556582 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:19:23.556566578 +0000 UTC m=+21.886239507 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.560933 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.563620 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-c5294"] Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.564654 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.567819 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.568125 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.568264 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.568494 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.568615 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.568731 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.568853 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.574357 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.589302 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.608101 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.632936 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.645099 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.657624 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-host-slash\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.657688 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-host-run-ovn-kubernetes\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.657713 4948 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-host-cni-netd\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.657741 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.657760 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.657778 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-run-ovn\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.657815 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-log-socket\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.657834 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-systemd-units\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.657854 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-run-openvswitch\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.657871 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/937749c1-3c7a-4f48-b65a-84b1335d64b6-env-overrides\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.657887 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-host-var-lib-cni-networks-ovn-kubernetes\") pod 
\"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.657902 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-host-cni-bin\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.657927 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgjfx\" (UniqueName: \"kubernetes.io/projected/937749c1-3c7a-4f48-b65a-84b1335d64b6-kube-api-access-rgjfx\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.657945 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-host-kubelet\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.657962 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-run-systemd\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.657978 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-host-run-netns\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.657997 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-var-lib-openvswitch\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.658054 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-node-log\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.658073 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-etc-openvswitch\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.658091 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: 
\"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.658106 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/937749c1-3c7a-4f48-b65a-84b1335d64b6-ovnkube-config\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.658123 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/937749c1-3c7a-4f48-b65a-84b1335d64b6-ovn-node-metrics-cert\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.658143 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.658159 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/937749c1-3c7a-4f48-b65a-84b1335d64b6-ovnkube-script-lib\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: E0122 14:19:22.658342 4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:19:22 crc kubenswrapper[4948]: E0122 14:19:22.658362 4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 14:19:22 crc kubenswrapper[4948]: E0122 14:19:22.658375 4948 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:19:22 crc kubenswrapper[4948]: E0122 14:19:22.658437 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 14:19:23.658415761 +0000 UTC m=+21.988088690 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:19:22 crc kubenswrapper[4948]: E0122 14:19:22.658898 4948 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.658902 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: E0122 14:19:22.658973 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 14:19:23.658953476 +0000 UTC m=+21.988626405 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:19:22 crc kubenswrapper[4948]: E0122 14:19:22.659136 4948 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:19:22 crc kubenswrapper[4948]: E0122 14:19:22.659168 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 14:19:23.659160862 +0000 UTC m=+21.988833791 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:19:22 crc kubenswrapper[4948]: E0122 14:19:22.659176 4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:19:22 crc kubenswrapper[4948]: E0122 14:19:22.659190 4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 14:19:22 crc kubenswrapper[4948]: E0122 14:19:22.659201 4948 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:19:22 crc kubenswrapper[4948]: E0122 14:19:22.659228 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 14:19:23.659219674 +0000 UTC m=+21.988892843 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.685870 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.701447 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.715439 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.734434 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Jan 
22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.758782 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.758830 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-host-cni-bin\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.758855 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgjfx\" (UniqueName: \"kubernetes.io/projected/937749c1-3c7a-4f48-b65a-84b1335d64b6-kube-api-access-rgjfx\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.758876 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-host-kubelet\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.758895 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-run-systemd\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.758912 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-host-run-netns\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.758933 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-var-lib-openvswitch\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.758951 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-node-log\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.758974 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-etc-openvswitch\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 
14:19:22.759000 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/937749c1-3c7a-4f48-b65a-84b1335d64b6-ovnkube-config\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.759056 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/937749c1-3c7a-4f48-b65a-84b1335d64b6-ovn-node-metrics-cert\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.759086 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/937749c1-3c7a-4f48-b65a-84b1335d64b6-ovnkube-script-lib\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.759107 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-host-run-ovn-kubernetes\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.759131 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-host-slash\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.759152 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-host-cni-netd\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.759187 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-run-ovn\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.759207 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-log-socket\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.759233 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-systemd-units\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.759253 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-run-openvswitch\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.759274 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/937749c1-3c7a-4f48-b65a-84b1335d64b6-env-overrides\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.759951 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/937749c1-3c7a-4f48-b65a-84b1335d64b6-env-overrides\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.760035 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.760071 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-host-cni-bin\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.760344 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-host-kubelet\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.760388 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-run-systemd\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.760420 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-host-run-netns\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.760447 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-var-lib-openvswitch\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.760481 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-node-log\") pod 
\"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.760510 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-etc-openvswitch\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.761085 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/937749c1-3c7a-4f48-b65a-84b1335d64b6-ovnkube-config\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.761626 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-run-ovn\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.761616 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-host-cni-netd\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.761689 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-host-slash\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.761702 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-host-run-ovn-kubernetes\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.761701 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-run-openvswitch\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.761716 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-systemd-units\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.761789 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/937749c1-3c7a-4f48-b65a-84b1335d64b6-log-socket\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 
14:19:22.761744 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\
\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:22Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.762319 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/937749c1-3c7a-4f48-b65a-84b1335d64b6-ovnkube-script-lib\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.766851 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/937749c1-3c7a-4f48-b65a-84b1335d64b6-ovn-node-metrics-cert\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.781957 4948 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:22Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.789660 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgjfx\" (UniqueName: \"kubernetes.io/projected/937749c1-3c7a-4f48-b65a-84b1335d64b6-kube-api-access-rgjfx\") pod \"ovnkube-node-c5294\" (UID: \"937749c1-3c7a-4f48-b65a-84b1335d64b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.808710 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:22Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.836489 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:22Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.859181 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:22Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.871083 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:22Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.884163 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:22Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.908029 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e2
7753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:22Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.911520 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 12:54:43.761977413 +0000 UTC Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.925224 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:22Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.938456 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.948591 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:22Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:22 crc kubenswrapper[4948]: W0122 14:19:22.958151 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod937749c1_3c7a_4f48_b65a_84b1335d64b6.slice/crio-8c49f096239e58d6b6e711a2d60d74c553380cbf14dadd63eaaf0410f8b62ce4 WatchSource:0}: Error finding container 8c49f096239e58d6b6e711a2d60d74c553380cbf14dadd63eaaf0410f8b62ce4: Status 404 returned error can't find the container with id 8c49f096239e58d6b6e711a2d60d74c553380cbf14dadd63eaaf0410f8b62ce4 Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.971092 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:22Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:22 crc kubenswrapper[4948]: I0122 14:19:22.991877 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:22Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.032235 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:23Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.070245 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:23Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.113129 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:23Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.161850 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art
-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\
\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:23Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.176954 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jwlht" event={"ID":"feadf51a-0802-4335-b035-fe290bdd3960","Type":"ContainerStarted","Data":"ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38"} Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.177003 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jwlht" event={"ID":"feadf51a-0802-4335-b035-fe290bdd3960","Type":"ContainerStarted","Data":"d49494811ba5a23a45f516062b401b30cc31a51ac2c89fd68b26cb5ac88647c3"} Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.178845 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be"} Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.178878 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5"} Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.178892 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"010ef3ff6e6a343e94156a587c4d591ccd7638335b446f0c6d95f83ca2b10478"} Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.180738 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a"} Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.180785 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"bb8533fa04fd11166e9f1e0cb8b7a49b1251555bab992d3e76790d1d73d343fb"} Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.182226 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" 
event={"ID":"355ee4ab-8609-4b0c-ad77-bff335632d2d","Type":"ContainerStarted","Data":"aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e"} Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.182268 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" event={"ID":"355ee4ab-8609-4b0c-ad77-bff335632d2d","Type":"ContainerStarted","Data":"a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1"} Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.182283 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" event={"ID":"355ee4ab-8609-4b0c-ad77-bff335632d2d","Type":"ContainerStarted","Data":"66b46e7a09a35678a24f278b9652c2b3d9507133f387f85d69b224118e0f2956"} Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.183457 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-jwqmh" event={"ID":"8a86f840-13e5-4b83-a9ab-018c2fa70906","Type":"ContainerStarted","Data":"cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059"} Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.183500 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-jwqmh" event={"ID":"8a86f840-13e5-4b83-a9ab-018c2fa70906","Type":"ContainerStarted","Data":"b7101017e58924c75ac05dd85d744e72f74b17a046b9795731813333da1ee59e"} Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.184337 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"8da1783885624fed48abceb480fee34ed43bf1ab72bc1dd501e4c3ae5cc9e7a2"} Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.185502 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" event={"ID":"937749c1-3c7a-4f48-b65a-84b1335d64b6","Type":"ContainerStarted","Data":"8c49f096239e58d6b6e711a2d60d74c553380cbf14dadd63eaaf0410f8b62ce4"} Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.186829 4948 generic.go:334] "Generic (PLEG): container finished" podID="45147dae-218a-40b8-8e9e-927e49d38624" containerID="decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0" exitCode=0 Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.187057 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" event={"ID":"45147dae-218a-40b8-8e9e-927e49d38624","Type":"ContainerDied","Data":"decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0"} Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.187086 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" event={"ID":"45147dae-218a-40b8-8e9e-927e49d38624","Type":"ContainerStarted","Data":"00a3335e065e9afd575c45dee4fa4e2de70e9a7dea9354a09036df1586b618bb"} Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.198910 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:23Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.240109 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:23Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.275881 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:23Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.318861 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:23Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.363282 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:23Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:23 crc 
kubenswrapper[4948]: I0122 14:19:23.441552 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:23Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.491351 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:23Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.525827 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:23Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.551773 4948 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:23Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.564530 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:19:23 crc kubenswrapper[4948]: E0122 14:19:23.564728 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:19:25.564704125 +0000 UTC m=+23.894377054 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.568067 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:23Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.601484 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:23Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.635139 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:23Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.666115 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.666153 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.666172 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.666191 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:19:23 crc kubenswrapper[4948]: E0122 14:19:23.666303 4948 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:19:23 crc kubenswrapper[4948]: E0122 14:19:23.666346 4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object 
"openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:19:23 crc kubenswrapper[4948]: E0122 14:19:23.666385 4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 14:19:23 crc kubenswrapper[4948]: E0122 14:19:23.666309 4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:19:23 crc kubenswrapper[4948]: E0122 14:19:23.666401 4948 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:19:23 crc kubenswrapper[4948]: E0122 14:19:23.666410 4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 14:19:23 crc kubenswrapper[4948]: E0122 14:19:23.666421 4948 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:19:23 crc kubenswrapper[4948]: E0122 14:19:23.666361 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 14:19:25.666346342 +0000 UTC m=+23.996019271 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:19:23 crc kubenswrapper[4948]: E0122 14:19:23.666502 4948 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:19:23 crc kubenswrapper[4948]: E0122 14:19:23.666554 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 14:19:25.666496316 +0000 UTC m=+23.996169245 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:19:23 crc kubenswrapper[4948]: E0122 14:19:23.666582 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 14:19:25.666567808 +0000 UTC m=+23.996240727 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:19:23 crc kubenswrapper[4948]: E0122 14:19:23.666596 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 14:19:25.666588389 +0000 UTC m=+23.996261318 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.687851 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:23Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.709369 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:23Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.912532 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 17:47:04.165236153 +0000 UTC Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.957244 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.957426 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:19:23 crc kubenswrapper[4948]: E0122 14:19:23.957548 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.957570 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:19:23 crc kubenswrapper[4948]: E0122 14:19:23.957714 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:19:23 crc kubenswrapper[4948]: E0122 14:19:23.957812 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.962241 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.962939 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.963716 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.964372 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.965361 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.965918 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.966595 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.967197 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.967874 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.968452 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.970695 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.971405 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.972370 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.972891 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.973847 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.974401 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.975144 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.975961 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.976664 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.977403 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.978339 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.978914 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.979794 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.980479 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.981370 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.982052 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.983128 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.983618 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" 
path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.984248 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.985174 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.985678 4948 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.985796 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.987811 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.988473 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.988925 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.990912 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.991637 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.992242 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.993337 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.994518 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.995092 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.995692 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" 
path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.996690 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.997772 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.998295 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.999243 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Jan 22 14:19:23 crc kubenswrapper[4948]: I0122 14:19:23.999781 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.001097 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.001666 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.002592 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.003169 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.003763 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.004741 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.005287 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.192895 4948 generic.go:334] "Generic (PLEG): container finished" podID="937749c1-3c7a-4f48-b65a-84b1335d64b6" containerID="3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573" exitCode=0 Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.193652 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" 
event={"ID":"937749c1-3c7a-4f48-b65a-84b1335d64b6","Type":"ContainerDied","Data":"3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573"} Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.196061 4948 generic.go:334] "Generic (PLEG): container finished" podID="45147dae-218a-40b8-8e9e-927e49d38624" containerID="c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e" exitCode=0 Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.196132 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" event={"ID":"45147dae-218a-40b8-8e9e-927e49d38624","Type":"ContainerDied","Data":"c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e"} Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.211852 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"runni
ng\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:24Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.223130 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:24Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.234937 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:24Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.250616 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:24Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:24 crc 
kubenswrapper[4948]: I0122 14:19:24.263186 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:24Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.281430 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:24Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.292737 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:24Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.304162 4948 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:24Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.318869 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:24Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.342371 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:24Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.354404 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de25971
26bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:24Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.368361 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:24Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.383857 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:24Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.397728 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:24Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.409382 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:24Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.427140 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:24Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.442730 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-
22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:24Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.458187 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:24Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.470406 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:24Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.506459 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:24Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.547947 4948 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:24Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.589452 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:24Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.633599 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:24Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.670636 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de25971
26bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:24Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.711733 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:24Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.747937 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:24Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.795201 4948 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.796898 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.796939 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.796956 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.797082 4948 kubelet_node_status.go:76] "Attempting to register node" node="crc" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.802593 4948 kubelet_node_status.go:115] "Node was previously registered" node="crc" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.802824 4948 kubelet_node_status.go:79] "Successfully registered node" 
node="crc" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.803837 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.803862 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.803872 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.803886 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.803897 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:24Z","lastTransitionTime":"2026-01-22T14:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:24 crc kubenswrapper[4948]: E0122 14:19:24.818887 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:24Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.822417 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.822473 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.822485 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.822500 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.822510 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:24Z","lastTransitionTime":"2026-01-22T14:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:24 crc kubenswrapper[4948]: E0122 14:19:24.833778 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:24Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.837247 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.837305 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.837322 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.837347 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.837363 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:24Z","lastTransitionTime":"2026-01-22T14:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:24 crc kubenswrapper[4948]: E0122 14:19:24.852036 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:24Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.856067 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.856106 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.856117 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.856132 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.856143 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:24Z","lastTransitionTime":"2026-01-22T14:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:24 crc kubenswrapper[4948]: E0122 14:19:24.867947 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:24Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.871266 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.871297 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.871306 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.871321 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.871328 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:24Z","lastTransitionTime":"2026-01-22T14:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:24 crc kubenswrapper[4948]: E0122 14:19:24.885365 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:24Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:24 crc kubenswrapper[4948]: E0122 14:19:24.885530 4948 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.886923 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.887007 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.887109 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.887185 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.887252 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:24Z","lastTransitionTime":"2026-01-22T14:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:24 crc kubenswrapper[4948]: I0122 14:19:24.913504 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 11:48:24.698940042 +0000 UTC Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:24.989233 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:24.989254 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:24.989264 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:24.989275 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:24.989285 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:24Z","lastTransitionTime":"2026-01-22T14:19:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.091347 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.091374 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.091382 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.091394 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.091404 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:25Z","lastTransitionTime":"2026-01-22T14:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.193606 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.193642 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.193652 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.193667 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.193679 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:25Z","lastTransitionTime":"2026-01-22T14:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.200687 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227"} Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.203762 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" event={"ID":"937749c1-3c7a-4f48-b65a-84b1335d64b6","Type":"ContainerStarted","Data":"b08d2009af5c34e87963d42b418cb5bc2e4420bcae3ff157f76072a644659921"} Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.203806 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" event={"ID":"937749c1-3c7a-4f48-b65a-84b1335d64b6","Type":"ContainerStarted","Data":"174e918b3a7095dee796235feeb4d0ffffbfbba45bd3f647cb68a4980e921778"} Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.203822 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" event={"ID":"937749c1-3c7a-4f48-b65a-84b1335d64b6","Type":"ContainerStarted","Data":"724993b6da16942a70c289ef23b888e5c99acdb5e757981e26ff9cfb7ca074ad"} Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.203835 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" event={"ID":"937749c1-3c7a-4f48-b65a-84b1335d64b6","Type":"ContainerStarted","Data":"c1ddd883d66df032d348dd5c01bf01eec618e6ceab82f0b74d6411033d96419c"} Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.203847 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" event={"ID":"937749c1-3c7a-4f48-b65a-84b1335d64b6","Type":"ContainerStarted","Data":"81d7b5a23cfc553e1a1c8a693b618499068a9bd28d8fde52df86091c290081e6"} Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.203857 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" event={"ID":"937749c1-3c7a-4f48-b65a-84b1335d64b6","Type":"ContainerStarted","Data":"34a22632cd0bfb0bc2caa9fabdd5f4cbb6a7f4bb9c366fe06c7367d64584cb98"} Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.205452 4948 generic.go:334] "Generic (PLEG): container finished" podID="45147dae-218a-40b8-8e9e-927e49d38624" containerID="1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e" exitCode=0 Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.205480 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" event={"ID":"45147dae-218a-40b8-8e9e-927e49d38624","Type":"ContainerDied","Data":"1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e"} Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.218242 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.240688 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-01-22T14:19:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.254151 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}
,{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.279975 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.291340 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.295993 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.296050 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.296060 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 
14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.296073 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.296083 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:25Z","lastTransitionTime":"2026-01-22T14:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.303840 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMoun
ts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 
tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.318257 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.334668 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.402422 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-
22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.403646 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.403687 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.403700 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.403722 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.403736 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:25Z","lastTransitionTime":"2026-01-22T14:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.418947 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.431743 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.445529 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-c
ni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.458835 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d77325
7453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.470645 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:25Z is after 
2025-08-24T17:21:41Z" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.492064 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\
\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"rec
ursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.504364 4948 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.506286 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.511920 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.512114 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.512325 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.512473 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.512614 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:25Z","lastTransitionTime":"2026-01-22T14:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.524218 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.535162 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.572155 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.584879 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:19:25 crc kubenswrapper[4948]: E0122 14:19:25.585208 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:19:29.585151974 +0000 UTC m=+27.914824913 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.596903 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-j4g7x"] Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.597727 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-j4g7x" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.613565 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.615257 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.615300 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.615310 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.615328 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.615339 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:25Z","lastTransitionTime":"2026-01-22T14:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.620592 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.640844 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.661331 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.681866 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.686453 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.686502 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.686538 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.686589 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mzcw\" (UniqueName: \"kubernetes.io/projected/2d0ef450-a908-4e0f-b551-de0724320540-kube-api-access-6mzcw\") pod \"node-ca-j4g7x\" (UID: \"2d0ef450-a908-4e0f-b551-de0724320540\") " pod="openshift-image-registry/node-ca-j4g7x" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.686620 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2d0ef450-a908-4e0f-b551-de0724320540-host\") pod \"node-ca-j4g7x\" (UID: \"2d0ef450-a908-4e0f-b551-de0724320540\") " pod="openshift-image-registry/node-ca-j4g7x" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.686641 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/2d0ef450-a908-4e0f-b551-de0724320540-serviceca\") pod \"node-ca-j4g7x\" (UID: \"2d0ef450-a908-4e0f-b551-de0724320540\") " pod="openshift-image-registry/node-ca-j4g7x" Jan 22 14:19:25 crc kubenswrapper[4948]: E0122 14:19:25.686660 4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:19:25 crc kubenswrapper[4948]: E0122 14:19:25.686691 
4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:19:25 crc kubenswrapper[4948]: E0122 14:19:25.686728 4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.686668 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:25 crc kubenswrapper[4948]: E0122 14:19:25.686740 4948 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:19:25 crc kubenswrapper[4948]: E0122 14:19:25.686750 4948 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:19:25 crc kubenswrapper[4948]: E0122 14:19:25.686707 4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 14:19:25 crc kubenswrapper[4948]: E0122 14:19:25.686812 4948 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:19:25 crc kubenswrapper[4948]: E0122 14:19:25.686827 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 14:19:29.686804282 +0000 UTC m=+28.016477401 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:19:25 crc kubenswrapper[4948]: E0122 14:19:25.686981 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 14:19:29.686948895 +0000 UTC m=+28.016621824 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:19:25 crc kubenswrapper[4948]: E0122 14:19:25.687001 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 14:19:29.686994107 +0000 UTC m=+28.016667266 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:19:25 crc kubenswrapper[4948]: E0122 14:19:25.686830 4948 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:19:25 crc kubenswrapper[4948]: E0122 14:19:25.687111 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 14:19:29.687102911 +0000 UTC m=+28.016776100 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.717477 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.717515 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.717525 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.717539 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.717552 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:25Z","lastTransitionTime":"2026-01-22T14:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.731869 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.773271 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.788547 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mzcw\" (UniqueName: \"kubernetes.io/projected/2d0ef450-a908-4e0f-b551-de0724320540-kube-api-access-6mzcw\") pod \"node-ca-j4g7x\" (UID: \"2d0ef450-a908-4e0f-b551-de0724320540\") " pod="openshift-image-registry/node-ca-j4g7x" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.788592 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2d0ef450-a908-4e0f-b551-de0724320540-host\") pod \"node-ca-j4g7x\" (UID: \"2d0ef450-a908-4e0f-b551-de0724320540\") " pod="openshift-image-registry/node-ca-j4g7x" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.788617 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/2d0ef450-a908-4e0f-b551-de0724320540-serviceca\") pod \"node-ca-j4g7x\" (UID: \"2d0ef450-a908-4e0f-b551-de0724320540\") " pod="openshift-image-registry/node-ca-j4g7x" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.788921 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/2d0ef450-a908-4e0f-b551-de0724320540-host\") pod \"node-ca-j4g7x\" (UID: \"2d0ef450-a908-4e0f-b551-de0724320540\") " pod="openshift-image-registry/node-ca-j4g7x" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.789843 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/2d0ef450-a908-4e0f-b551-de0724320540-serviceca\") pod \"node-ca-j4g7x\" (UID: \"2d0ef450-a908-4e0f-b551-de0724320540\") " pod="openshift-image-registry/node-ca-j4g7x" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.808806 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.819717 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.819755 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.819767 4948 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.819785 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.819797 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:25Z","lastTransitionTime":"2026-01-22T14:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.839657 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mzcw\" (UniqueName: \"kubernetes.io/projected/2d0ef450-a908-4e0f-b551-de0724320540-kube-api-access-6mzcw\") pod \"node-ca-j4g7x\" (UID: \"2d0ef450-a908-4e0f-b551-de0724320540\") " pod="openshift-image-registry/node-ca-j4g7x" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.870220 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.908227 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.910339 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/node-ca-j4g7x" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.914448 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 05:38:11.840722031 +0000 UTC Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.923352 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.923485 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.923540 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.923624 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.923684 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:25Z","lastTransitionTime":"2026-01-22T14:19:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:25 crc kubenswrapper[4948]: W0122 14:19:25.924476 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2d0ef450_a908_4e0f_b551_de0724320540.slice/crio-8e5274e0541f5a12e1cc8474026b83137e252a493b23b1ec12eb042985611f7e WatchSource:0}: Error finding container 8e5274e0541f5a12e1cc8474026b83137e252a493b23b1ec12eb042985611f7e: Status 404 returned error can't find the container with id 8e5274e0541f5a12e1cc8474026b83137e252a493b23b1ec12eb042985611f7e Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.953088 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.956517 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:19:25 crc kubenswrapper[4948]: E0122 14:19:25.956636 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.956977 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:19:25 crc kubenswrapper[4948]: E0122 14:19:25.957092 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.957229 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:25 crc kubenswrapper[4948]: E0122 14:19:25.957299 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:19:25 crc kubenswrapper[4948]: I0122 14:19:25.993940 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:25Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.026498 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.026538 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.026549 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.026565 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.026575 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:26Z","lastTransitionTime":"2026-01-22T14:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI 
configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.043072 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\
\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"nam
e\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://
3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.069695 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error 
occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.108109 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.130564 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.130602 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.130614 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.130632 4948 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.130646 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:26Z","lastTransitionTime":"2026-01-22T14:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.147253 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j4g7x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d0ef450-a908-4e0f-b551-de0724320540\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mzcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j4g7x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.191252 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.210807 4948 generic.go:334] "Generic (PLEG): container finished" podID="45147dae-218a-40b8-8e9e-927e49d38624" containerID="0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259" exitCode=0 Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.210852 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" event={"ID":"45147dae-218a-40b8-8e9e-927e49d38624","Type":"ContainerDied","Data":"0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259"} Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.213860 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-j4g7x" event={"ID":"2d0ef450-a908-4e0f-b551-de0724320540","Type":"ContainerStarted","Data":"e77e0f7c92ba0b75f07179e4619dbbf9270727b13bd14348dec038239fef65af"} Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.213895 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-j4g7x" event={"ID":"2d0ef450-a908-4e0f-b551-de0724320540","Type":"ContainerStarted","Data":"8e5274e0541f5a12e1cc8474026b83137e252a493b23b1ec12eb042985611f7e"} Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.233924 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.233962 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.233975 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.233992 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.234002 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:26Z","lastTransitionTime":"2026-01-22T14:19:26Z","reason":"KubeletNotReady","message":"container runtime 
network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.239097 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.269446 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers 
with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.316478 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.337381 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.337423 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.337434 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.337450 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.337461 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:26Z","lastTransitionTime":"2026-01-22T14:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.351000 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.389166 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.428736 4948 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.441552 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.441624 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.441635 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.441650 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.441670 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:26Z","lastTransitionTime":"2026-01-22T14:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.469193 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.511222 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.544667 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.544726 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.544737 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.544760 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.544772 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:26Z","lastTransitionTime":"2026-01-22T14:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.549589 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.588947 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.632013 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j4g7x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d0ef450-a908-4e0f-b551-de0724320540\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77e0f7c92ba0b75f07179e4619dbbf9270727b13bd14348dec038239fef65af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mzcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j4g7x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.647737 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.647790 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.647802 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.647819 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.647831 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:26Z","lastTransitionTime":"2026-01-22T14:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.669368 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.710626 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.750601 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.750640 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.750650 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.750665 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.750674 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:26Z","lastTransitionTime":"2026-01-22T14:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.751967 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:
19:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.790057 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.830452 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.853456 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.853498 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.853514 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.853533 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.853545 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:26Z","lastTransitionTime":"2026-01-22T14:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.870038 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.911047 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.916127 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 21:20:31.104406189 +0000 UTC Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.953986 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.955522 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.955704 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.955778 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.955926 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.956093 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:26Z","lastTransitionTime":"2026-01-22T14:19:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:26 crc kubenswrapper[4948]: I0122 14:19:26.991449 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.030190 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.057967 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.058052 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.058066 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.058094 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.058108 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:27Z","lastTransitionTime":"2026-01-22T14:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.072600 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"image
ID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"m
ountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126
.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.161409 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.161450 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.161459 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.161478 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.161488 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:27Z","lastTransitionTime":"2026-01-22T14:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.220083 4948 generic.go:334] "Generic (PLEG): container finished" podID="45147dae-218a-40b8-8e9e-927e49d38624" containerID="7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280" exitCode=0 Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.220188 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" event={"ID":"45147dae-218a-40b8-8e9e-927e49d38624","Type":"ContainerDied","Data":"7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280"} Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.225825 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" event={"ID":"937749c1-3c7a-4f48-b65a-84b1335d64b6","Type":"ContainerStarted","Data":"67faca70b5ec9d02fe3cd9455cb1b95e9dce6fe764d354a3ce7b132ccfc315b1"} Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.243482 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.261307 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.263941 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.263966 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.263975 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.263988 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.263999 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:27Z","lastTransitionTime":"2026-01-22T14:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.272355 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j4g7x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d0ef450-a908-4e0f-b551-de0724320540\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77e0f7c92ba0b75f07179e4619dbbf9270727b13bd14348dec038239fef65af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mzcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j4g7x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.288511 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.304994 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.308041 4948 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.328521 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.366528 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.366580 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.366595 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.366626 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.366638 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:27Z","lastTransitionTime":"2026-01-22T14:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.375462 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdh
h6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.412538 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.450735 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.470093 4948 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.470133 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.470145 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.470161 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.470171 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:27Z","lastTransitionTime":"2026-01-22T14:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.491733 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin
\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.533926 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha2
56:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.572956 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.572992 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.573001 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.573034 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.573043 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:27Z","lastTransitionTime":"2026-01-22T14:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.574923 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.625859 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:27Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.655435 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:27Z 
is after 2025-08-24T17:21:41Z" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.675626 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.675681 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.675692 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.675714 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.675731 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:27Z","lastTransitionTime":"2026-01-22T14:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.778715 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.778759 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.778770 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.778787 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.778799 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:27Z","lastTransitionTime":"2026-01-22T14:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.882074 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.882145 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.882163 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.882190 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.882205 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:27Z","lastTransitionTime":"2026-01-22T14:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.916681 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 13:31:13.672416608 +0000 UTC Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.957133 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.957173 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.957151 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:19:27 crc kubenswrapper[4948]: E0122 14:19:27.957284 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:19:27 crc kubenswrapper[4948]: E0122 14:19:27.957346 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:19:27 crc kubenswrapper[4948]: E0122 14:19:27.957393 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.984262 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.984302 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.984312 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.984329 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:27 crc kubenswrapper[4948]: I0122 14:19:27.984339 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:27Z","lastTransitionTime":"2026-01-22T14:19:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.086425 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.086456 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.086465 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.086477 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.086485 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:28Z","lastTransitionTime":"2026-01-22T14:19:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.189076 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.189124 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.189132 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.189145 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.189155 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:28Z","lastTransitionTime":"2026-01-22T14:19:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.231396 4948 generic.go:334] "Generic (PLEG): container finished" podID="45147dae-218a-40b8-8e9e-927e49d38624" containerID="a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443" exitCode=0 Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.231435 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" event={"ID":"45147dae-218a-40b8-8e9e-927e49d38624","Type":"ContainerDied","Data":"a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443"} Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.245306 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.257322 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.267530 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j4g7x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d0ef450-a908-4e0f-b551-de0724320540\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77e0f7c92ba0b75f07179e4619dbbf9270727b13bd14348dec038239fef65af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mzcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j4g7x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.283153 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.290918 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.290956 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.290965 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.290979 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.290987 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:28Z","lastTransitionTime":"2026-01-22T14:19:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.296261 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.310308 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.325531 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.336859 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.346513 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.357365 4948 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.367987 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-22T14:19:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.379211 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.391716 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.392604 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.392646 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.392658 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.392674 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.392684 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:28Z","lastTransitionTime":"2026-01-22T14:19:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.408480 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47
d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:28Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.484879 4948 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.495245 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.495276 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.495286 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.495302 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.495314 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:28Z","lastTransitionTime":"2026-01-22T14:19:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.598035 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.598069 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.598080 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.598094 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.598105 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:28Z","lastTransitionTime":"2026-01-22T14:19:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.701174 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.701223 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.701235 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.701253 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.701267 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:28Z","lastTransitionTime":"2026-01-22T14:19:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.803723 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.803757 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.803769 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.803784 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.803796 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:28Z","lastTransitionTime":"2026-01-22T14:19:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.905911 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.905949 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.905960 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.905976 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.905986 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:28Z","lastTransitionTime":"2026-01-22T14:19:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:28 crc kubenswrapper[4948]: I0122 14:19:28.917523 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-17 09:43:41.144867536 +0000 UTC Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.008738 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.008782 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.008794 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.008811 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.008823 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:29Z","lastTransitionTime":"2026-01-22T14:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.111392 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.111443 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.111452 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.111468 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.111477 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:29Z","lastTransitionTime":"2026-01-22T14:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.213712 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.213989 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.213999 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.214023 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.214035 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:29Z","lastTransitionTime":"2026-01-22T14:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.237282 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" event={"ID":"45147dae-218a-40b8-8e9e-927e49d38624","Type":"ContainerStarted","Data":"ad48d2cda8a3700afaf5c50dbd1b380cc15c78e9bb88b42701297b7aefd5d5b3"} Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.256123 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.275853 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.289270 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j4g7x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d0ef450-a908-4e0f-b551-de0724320540\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77e0f7c92ba0b75f07179e4619dbbf9270727b13bd14348dec038239fef65af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mzcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\
\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j4g7x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.300675 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fa
c117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.311605 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.320214 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.320264 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.320277 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.320294 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.320305 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:29Z","lastTransitionTime":"2026-01-22T14:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.321310 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.334816 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.345672 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.359441 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.373837 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad48d2cda8a3700afaf5c50dbd1b380cc15c78e9bb88b42701297b7aefd5d5b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.384513 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-conf
ig\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.395922 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.407675 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.420585 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-c
ni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:29Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.422185 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.422223 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.422234 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.422250 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.422264 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:29Z","lastTransitionTime":"2026-01-22T14:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.529666 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.529701 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.529713 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.529730 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.529744 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:29Z","lastTransitionTime":"2026-01-22T14:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.633149 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.633217 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.633257 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.633267 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.633284 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.633295 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:29Z","lastTransitionTime":"2026-01-22T14:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:29 crc kubenswrapper[4948]: E0122 14:19:29.633544 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:19:37.633520869 +0000 UTC m=+35.963193798 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.734316 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.734364 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.734390 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.734416 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:19:29 crc kubenswrapper[4948]: E0122 14:19:29.734510 4948 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:19:29 crc kubenswrapper[4948]: E0122 14:19:29.734527 4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:19:29 crc kubenswrapper[4948]: E0122 14:19:29.734544 4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 14:19:29 crc kubenswrapper[4948]: E0122 14:19:29.734557 4948 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:19:29 crc kubenswrapper[4948]: E0122 14:19:29.734573 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" 
failed. No retries permitted until 2026-01-22 14:19:37.73455835 +0000 UTC m=+36.064231279 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:19:29 crc kubenswrapper[4948]: E0122 14:19:29.734582 4948 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:19:29 crc kubenswrapper[4948]: E0122 14:19:29.734590 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 14:19:37.73457978 +0000 UTC m=+36.064252709 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:19:29 crc kubenswrapper[4948]: E0122 14:19:29.734662 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 14:19:37.734645702 +0000 UTC m=+36.064318631 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:19:29 crc kubenswrapper[4948]: E0122 14:19:29.734527 4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:19:29 crc kubenswrapper[4948]: E0122 14:19:29.734683 4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 14:19:29 crc kubenswrapper[4948]: E0122 14:19:29.734695 4948 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:19:29 crc kubenswrapper[4948]: E0122 14:19:29.734724 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 14:19:37.734717814 +0000 UTC m=+36.064390743 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.735628 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.735660 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.735668 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.735682 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.735690 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:29Z","lastTransitionTime":"2026-01-22T14:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.838354 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.838385 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.838393 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.838406 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.838413 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:29Z","lastTransitionTime":"2026-01-22T14:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.917960 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 10:07:25.322164621 +0000 UTC Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.940779 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.940807 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.940815 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.940827 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.940836 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:29Z","lastTransitionTime":"2026-01-22T14:19:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.956141 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.956168 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:29 crc kubenswrapper[4948]: I0122 14:19:29.956214 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:19:29 crc kubenswrapper[4948]: E0122 14:19:29.956263 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:19:29 crc kubenswrapper[4948]: E0122 14:19:29.956335 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:19:29 crc kubenswrapper[4948]: E0122 14:19:29.956448 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.043391 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.043433 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.043446 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.043464 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.043478 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:30Z","lastTransitionTime":"2026-01-22T14:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.145299 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.145336 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.145346 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.145361 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.145372 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:30Z","lastTransitionTime":"2026-01-22T14:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.243910 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" event={"ID":"937749c1-3c7a-4f48-b65a-84b1335d64b6","Type":"ContainerStarted","Data":"685302cd659be58f34d32bf114fb5388686a9b90bef5238e7eef374d1104265d"} Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.244267 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.244314 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.247774 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.247801 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.247810 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.247822 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.247832 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:30Z","lastTransitionTime":"2026-01-22T14:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.257456 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.265775 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.267163 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.277177 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1ddd883d66df032d348dd5c01bf01eec618e6ceab82f0b74d6411033d96419c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://724993b6da16942a70c289ef23b888e5c99acdb5e757981e26ff9cfb7ca074ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b08d2009af5c34e87963d42b418cb5bc2e4420bcae3ff157f76072a644659921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://174e918b3a7095dee796235feeb4d0ffffbfbba45bd3f647cb68a4980e921778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81d7b5a23cfc553e1a1c8a693b618499068a9bd28d8fde52df86091c290081e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a22632cd0bfb0bc2caa9fabdd5f4cbb6a7f4bb9c366fe06c7367d64584cb98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\
"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685302cd659be58f34d32bf114fb5388686a9b90bef5238e7eef374d1104265d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67faca70b5ec9d02fe3cd9455cb1b95e9dce6fe764d354a3ce7b132ccfc315b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.287623 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.297196 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.305703 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j4g7x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d0ef450-a908-4e0f-b551-de0724320540\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77e0f7c92ba0b75f07179e4619dbbf9270727b13bd14348dec038239fef65af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mzcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j4g7x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.317704 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.328261 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.338845 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.350421 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.350456 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.350467 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.350485 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.350496 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:30Z","lastTransitionTime":"2026-01-22T14:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.355240 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad48d2cda8a3700afaf5c50dbd1b380cc15c78e9bb88b42701297b7aefd5d5b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.367611 4948 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc
/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.377076 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.388922 4948 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.405293 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-22T14:19:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.421044 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.434614 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.446297 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-22T14:19:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.452440 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.452475 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.452484 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.452497 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.452507 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:30Z","lastTransitionTime":"2026-01-22T14:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.456503 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14
:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.465857 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j4g7x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d0ef450-a908-4e0f-b551-de0724320540\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77e0f7c92ba0b75f07179e4619dbbf9270727b13bd14348dec038239fef65af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mzcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j4g7x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.479932 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.492889 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.504493 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.523289 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad48d2cda8a3700afaf5c50dbd1b380cc15c78e9bb88b42701297b7aefd5d5b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.542939 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.554743 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.555208 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.555302 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.555452 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.555549 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:30Z","lastTransitionTime":"2026-01-22T14:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.556323 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.569885 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.582108 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.599100 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1ddd883d66df032d348dd5c01bf01eec618e6ceab82f0b74d6411033d96419c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://724993b6da16942a70c289ef23b888e5c99acdb5e757981e26ff9cfb7ca074ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\
":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b08d2009af5c34e87963d42b418cb5bc2e4420bcae3ff157f76072a644659921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://174e918b3a7095dee796235feeb4d0ffffbfbba45bd3f647cb68a4980e921778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81d7b5a23cfc553e1a1c8a693b618499068a9bd28d8fde52df86091c290081e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a22632cd0bfb0bc2caa9fabdd5f4cbb6a7f4bb9c366fe06c7367d64584cb98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685302cd659be58f34d32bf114fb5388686a9b90bef5238e7eef374d1104265d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\
":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67faca70b5ec9d02fe3cd9455cb1b95e9dce6fe764d354a3ce7b132ccfc315b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 
14:19:30.611087 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:30Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.657841 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.657905 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.657918 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.657956 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.657972 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:30Z","lastTransitionTime":"2026-01-22T14:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.759857 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.759898 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.759907 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.759920 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.759929 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:30Z","lastTransitionTime":"2026-01-22T14:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.863136 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.863229 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.863244 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.863268 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.863286 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:30Z","lastTransitionTime":"2026-01-22T14:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.918667 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 15:25:56.265622184 +0000 UTC Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.965434 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.965474 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.965487 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.965501 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:30 crc kubenswrapper[4948]: I0122 14:19:30.965511 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:30Z","lastTransitionTime":"2026-01-22T14:19:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.067168 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.067225 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.067239 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.067256 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.067268 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:31Z","lastTransitionTime":"2026-01-22T14:19:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.170064 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.170104 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.170127 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.170143 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.170154 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:31Z","lastTransitionTime":"2026-01-22T14:19:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.250724 4948 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.272730 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.273103 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.273271 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.273429 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.273569 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:31Z","lastTransitionTime":"2026-01-22T14:19:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.383757 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.383795 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.383807 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.383823 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.383834 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:31Z","lastTransitionTime":"2026-01-22T14:19:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.486617 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.486663 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.486678 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.486703 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.486720 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:31Z","lastTransitionTime":"2026-01-22T14:19:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.588797 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.588841 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.588853 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.588868 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.588879 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:31Z","lastTransitionTime":"2026-01-22T14:19:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.691164 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.691201 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.691212 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.691227 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.691237 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:31Z","lastTransitionTime":"2026-01-22T14:19:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.793267 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.793300 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.793309 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.793328 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.793338 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:31Z","lastTransitionTime":"2026-01-22T14:19:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.896046 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.896097 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.896111 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.896131 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.896145 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:31Z","lastTransitionTime":"2026-01-22T14:19:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.919238 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 14:20:21.045058389 +0000 UTC Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.956485 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.956509 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:19:31 crc kubenswrapper[4948]: E0122 14:19:31.956639 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.956763 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:19:31 crc kubenswrapper[4948]: E0122 14:19:31.956899 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:19:31 crc kubenswrapper[4948]: E0122 14:19:31.956982 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.974205 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\"
:{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:31Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.988884 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:31Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.998210 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.998239 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.998250 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.998264 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:31 crc kubenswrapper[4948]: I0122 14:19:31.998275 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:31Z","lastTransitionTime":"2026-01-22T14:19:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.003688 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.022480 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad48d2cda8a3700afaf5c50dbd1b380cc15c78e9bb88b42701297b7aefd5d5b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.041769 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.054748 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.069168 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.082870 4948 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.098185 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.099701 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.099752 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.099769 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.099791 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.099807 4948 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:32Z","lastTransitionTime":"2026-01-22T14:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.121765 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1ddd883d66df032d348dd5c01bf01eec618e6ceab82f0b74d6411033d96419c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://724993b6da16942a70c289ef23b888e5c99acdb5e757981e26ff9cfb7ca074ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\
":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b08d2009af5c34e87963d42b418cb5bc2e4420bcae3ff157f76072a644659921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://174e918b3a7095dee796235feeb4d0ffffbfbba45bd3f647cb68a4980e921778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81d7b5a23cfc553e1a1c8a693b618499068a9bd28d8fde52df86091c290081e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a22632cd0bfb0bc2caa9fabdd5f4cbb6a7f4bb9c366fe06c7367d64584cb98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685302cd659be58f34d32bf114fb5388686a9b90bef5238e7eef374d1104265d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\
":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67faca70b5ec9d02fe3cd9455cb1b95e9dce6fe764d354a3ce7b132ccfc315b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 
14:19:32.133621 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"
containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.145519 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling 
webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.156449 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.167607 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j4g7x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d0ef450-a908-4e0f-b551-de0724320540\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77e0f7c92ba0b75f07179e4619dbbf9270727b13bd14348dec038239fef65af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mzcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j4g7x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.202868 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.202903 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.202914 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.202932 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.202943 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:32Z","lastTransitionTime":"2026-01-22T14:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.255224 4948 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5294_937749c1-3c7a-4f48-b65a-84b1335d64b6/ovnkube-controller/0.log" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.257511 4948 generic.go:334] "Generic (PLEG): container finished" podID="937749c1-3c7a-4f48-b65a-84b1335d64b6" containerID="685302cd659be58f34d32bf114fb5388686a9b90bef5238e7eef374d1104265d" exitCode=1 Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.257554 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" event={"ID":"937749c1-3c7a-4f48-b65a-84b1335d64b6","Type":"ContainerDied","Data":"685302cd659be58f34d32bf114fb5388686a9b90bef5238e7eef374d1104265d"} Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.258312 4948 scope.go:117] "RemoveContainer" containerID="685302cd659be58f34d32bf114fb5388686a9b90bef5238e7eef374d1104265d" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.273383 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{
\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.290081 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.300637 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.305186 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.305232 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.305247 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.305266 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.305282 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:32Z","lastTransitionTime":"2026-01-22T14:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.316212 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.330913 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.348843 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1ddd883d66df032d348dd5c01bf01eec618e6ceab82f0b74d6411033d96419c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://724993b6da16942a70c289ef23b888e5c99acdb5e757981e26ff9cfb7ca074ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b08d2009af5c34e87963d42b418cb5bc2e4420bcae3ff157f76072a644659921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://174e918b3a7095dee796235feeb4d0ffffbfbba45bd3f647cb68a4980e921778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81d7b5a23cfc553e1a1c8a693b618499068a9bd28d8fde52df86091c290081e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a22632cd0bfb0bc2caa9fabdd5f4cbb6a7f4bb9c366fe06c7367d64584cb98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://685302cd659be58f34d32bf114fb5388686a9b90bef5238e7eef374d1104265d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685302cd659be58f34d32bf114fb5388686a9b90bef5238e7eef374d1104265d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:19:31Z\\\",\\\"message\\\":\\\"r 6\\\\nI0122 14:19:31.456517 6268 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 14:19:31.456541 6268 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 14:19:31.456555 6268 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 14:19:31.456564 6268 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 14:19:31.456574 6268 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0122 14:19:31.456916 6268 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 14:19:31.457159 6268 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 14:19:31.457262 6268 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 14:19:31.457394 6268 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 14:19:31.457900 6268 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 14:19:31.457947 6268 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 14:19:31.457993 6268 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0122 14:19:31.458054 6268 factory.go:656] Stopping watch factory\\\\nI0122 14:19:31.458073 6268 ovnkube.go:599] Stopped ovnkube\\\\nI0122 
14\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:29Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67faca70b5ec9d02fe3cd9455cb1b95e9dce6fe764d354a3ce7b132ccfc315b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.359980 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4b
a8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.386303 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.395240 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.404806 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j4g7x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d0ef450-a908-4e0f-b551-de0724320540\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77e0f7c92ba0b75f07179e4619dbbf9270727b13bd14348dec038239fef65af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mzcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j4g7x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.407283 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.407307 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.407317 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.407329 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.407338 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:32Z","lastTransitionTime":"2026-01-22T14:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.417108 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f
7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.426854 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.437089 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.450623 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad48d2cda8a3700afaf5c50dbd1b380cc15c78e9bb88b42701297b7aefd5d5b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.509262 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.509290 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.509300 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.509312 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.509321 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:32Z","lastTransitionTime":"2026-01-22T14:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.610822 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.610867 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.610880 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.610899 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.610913 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:32Z","lastTransitionTime":"2026-01-22T14:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.712804 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.712843 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.712856 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.712872 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.712911 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:32Z","lastTransitionTime":"2026-01-22T14:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.815755 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.815781 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.815790 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.815803 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.815811 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:32Z","lastTransitionTime":"2026-01-22T14:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.918085 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.918117 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.918125 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.918138 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.918150 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:32Z","lastTransitionTime":"2026-01-22T14:19:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:32 crc kubenswrapper[4948]: I0122 14:19:32.919696 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 17:03:44.324743139 +0000 UTC Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.020273 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.020299 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.020307 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.020320 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.020330 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:33Z","lastTransitionTime":"2026-01-22T14:19:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.122955 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.123005 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.123033 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.123050 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.123062 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:33Z","lastTransitionTime":"2026-01-22T14:19:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.225117 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.225150 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.225158 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.225170 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.225178 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:33Z","lastTransitionTime":"2026-01-22T14:19:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.261276 4948 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5294_937749c1-3c7a-4f48-b65a-84b1335d64b6/ovnkube-controller/0.log" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.264185 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" event={"ID":"937749c1-3c7a-4f48-b65a-84b1335d64b6","Type":"ContainerStarted","Data":"159bf57702dc28f8e98dc4e7680fa2ae8a1ca81393fd615e28ea5d5b74c83b9f"} Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.264293 4948 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.277902 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j4g7x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d0ef450-a908-4e0f-b551-de0724320540\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77e0f7c92ba0b75f07179e4619dbbf9270727b13bd14348dec038239fef65af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mzcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j4g7x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.293254 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.309131 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-22T14:19:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.321039 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.326718 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.326747 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.326758 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.326774 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.326786 4948 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:33Z","lastTransitionTime":"2026-01-22T14:19:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.333130 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apis
erver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 
cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.345718 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.359516 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.375049 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad48d2cda8a3700afaf5c50dbd1b380cc15c78e9bb88b42701297b7aefd5d5b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.387480 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-conf
ig\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.401328 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.411509 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.422577 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-c
ni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.429156 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.429190 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.429198 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.429210 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.429219 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:33Z","lastTransitionTime":"2026-01-22T14:19:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.434612 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.452086 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1ddd883d66df032d348dd5c01bf01eec618e6ceab82f0b74d6411033d96419c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://724993b6da16942a70c289ef23b888e5c99acdb5e757981e26ff9cfb7ca074ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b08d2009af5c34e87963d42b418cb5bc2e4420bcae3ff157f76072a644659921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://174e918b3a7095dee796235feeb4d0ffffbfbba45bd3f647cb68a4980e921778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81d7b5a23cfc553e1a1c8a693b618499068a9bd28d8fde52df86091c290081e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a22632cd0bfb0bc2caa9fabdd5f4cbb6a7f4bb9c366fe06c7367d64584cb98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159bf57702dc28f8e98dc4e7680fa2ae8a1ca81393fd615e28ea5d5b74c83b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685302cd659be58f34d32bf114fb5388686a9b90bef5238e7eef374d1104265d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:19:31Z\\\",\\\"message\\\":\\\"r 6\\\\nI0122 14:19:31.456517 6268 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 14:19:31.456541 6268 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 14:19:31.456555 6268 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 14:19:31.456564 6268 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 14:19:31.456574 6268 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0122 14:19:31.456916 6268 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 14:19:31.457159 6268 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 14:19:31.457262 6268 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 14:19:31.457394 6268 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 14:19:31.457900 6268 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 14:19:31.457947 6268 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 14:19:31.457993 6268 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0122 14:19:31.458054 6268 factory.go:656] Stopping watch factory\\\\nI0122 14:19:31.458073 6268 ovnkube.go:599] Stopped ovnkube\\\\nI0122 
14\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67faca70b5ec9d02fe3cd9455cb1b95e9dce6fe764d354a3ce7b132ccfc315b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:33Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.531883 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.531931 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.531942 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.531963 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.531976 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:33Z","lastTransitionTime":"2026-01-22T14:19:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.634206 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.634282 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.634305 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.634336 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.634358 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:33Z","lastTransitionTime":"2026-01-22T14:19:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.736868 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.736907 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.736917 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.736932 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.736941 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:33Z","lastTransitionTime":"2026-01-22T14:19:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.839126 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.839166 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.839176 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.839191 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.839200 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:33Z","lastTransitionTime":"2026-01-22T14:19:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.920063 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-14 14:43:05.488895739 +0000 UTC Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.942248 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.942285 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.942295 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.942310 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.942321 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:33Z","lastTransitionTime":"2026-01-22T14:19:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.956572 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.956837 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:19:33 crc kubenswrapper[4948]: E0122 14:19:33.956964 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:19:33 crc kubenswrapper[4948]: E0122 14:19:33.957249 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:19:33 crc kubenswrapper[4948]: I0122 14:19:33.957357 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:19:33 crc kubenswrapper[4948]: E0122 14:19:33.957672 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.045130 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.045165 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.045174 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.045188 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.045198 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:34Z","lastTransitionTime":"2026-01-22T14:19:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.147395 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.147719 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.147923 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.148171 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.148374 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:34Z","lastTransitionTime":"2026-01-22T14:19:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.250500 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.250542 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.250554 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.250571 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.250582 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:34Z","lastTransitionTime":"2026-01-22T14:19:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.269860 4948 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5294_937749c1-3c7a-4f48-b65a-84b1335d64b6/ovnkube-controller/1.log" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.270571 4948 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5294_937749c1-3c7a-4f48-b65a-84b1335d64b6/ovnkube-controller/0.log" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.273775 4948 generic.go:334] "Generic (PLEG): container finished" podID="937749c1-3c7a-4f48-b65a-84b1335d64b6" containerID="159bf57702dc28f8e98dc4e7680fa2ae8a1ca81393fd615e28ea5d5b74c83b9f" exitCode=1 Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.273839 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" event={"ID":"937749c1-3c7a-4f48-b65a-84b1335d64b6","Type":"ContainerDied","Data":"159bf57702dc28f8e98dc4e7680fa2ae8a1ca81393fd615e28ea5d5b74c83b9f"} Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.273878 4948 scope.go:117] "RemoveContainer" containerID="685302cd659be58f34d32bf114fb5388686a9b90bef5238e7eef374d1104265d" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.274666 4948 scope.go:117] "RemoveContainer" containerID="159bf57702dc28f8e98dc4e7680fa2ae8a1ca81393fd615e28ea5d5b74c83b9f" Jan 22 14:19:34 crc kubenswrapper[4948]: E0122 14:19:34.274934 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-c5294_openshift-ovn-kubernetes(937749c1-3c7a-4f48-b65a-84b1335d64b6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" podUID="937749c1-3c7a-4f48-b65a-84b1335d64b6" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.287602 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:34Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.305195 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:34Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.321989 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:34Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.353054 4948 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.353095 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.353106 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.353121 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.353132 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:34Z","lastTransitionTime":"2026-01-22T14:19:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.353266 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin
\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:34Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.367100 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal 
error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:34Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.388237 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1ddd883d66df032d348dd5c01bf01eec618e6ceab82f0b74d6411033d96419c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://724993b6da16942a70c289ef23b888e5c99acdb5e757981e26ff9cfb7ca074ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\"
,\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b08d2009af5c34e87963d42b418cb5bc2e4420bcae3ff157f76072a644659921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://174e918b3a7095dee796235feeb4d0ffffbfbba45bd3f647cb68a4980e921778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81d7b5a23cfc553e1a1c8a693b618499068a9bd28d8fde52df86091c290081e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}
,{\\\"containerID\\\":\\\"cri-o://34a22632cd0bfb0bc2caa9fabdd5f4cbb6a7f4bb9c366fe06c7367d64584cb98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159bf57702dc28f8e98dc4e7680fa2ae8a1ca81393fd615e28ea5d5b74c83b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685302cd659be58f34d32bf114fb5388686a9b90bef5238e7eef374d1104265d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:19:31Z\\\",\\\"message\\\":\\\"r 6\\\\nI0122 14:19:31.456517 6268 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 14:19:31.456541 6268 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 14:19:31.456555 6268 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 14:19:31.456564 6268 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 14:19:31.456574 6268 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0122 14:19:31.456916 6268 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 14:19:31.457159 6268 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 14:19:31.457262 6268 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 14:19:31.457394 6268 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 14:19:31.457900 6268 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 14:19:31.457947 6268 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 14:19:31.457993 6268 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0122 14:19:31.458054 6268 factory.go:656] Stopping watch factory\\\\nI0122 14:19:31.458073 6268 ovnkube.go:599] Stopped ovnkube\\\\nI0122 
14\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159bf57702dc28f8e98dc4e7680fa2ae8a1ca81393fd615e28ea5d5b74c83b9f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:19:33Z\\\",\\\"message\\\":\\\"3.144663 6392 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-controller-manager-operator/metrics]} name:Service_openshift-controller-manager-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.58:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4607c9b7-15f9-4ba0-86e5-0021ba7e4488}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 14:19:33.144693 6392 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/machine-config-daemon-ldzzt\\\\nI0122 14:19:33.144700 6392 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0122 14:19:33.144710 6392 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0122 14:19:33.144711 6392 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0122 14:19:33.144717 6392 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in 
no\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67faca70b5ec9d02fe3cd9455cb1b95e9dce6fe764d354a3ce7b132ccfc315b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:34Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.399747 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4b
a8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:34Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.413306 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:34Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.422173 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:34Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.431269 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j4g7x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d0ef450-a908-4e0f-b551-de0724320540\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77e0f7c92ba0b75f07179e4619dbbf9270727b13bd14348dec038239fef65af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mzcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j4g7x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:34Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.443327 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 
certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:34Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.455024 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:34Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.455545 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.455570 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.455579 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.455591 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.455601 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:34Z","lastTransitionTime":"2026-01-22T14:19:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.466035 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:34Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.479700 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad48d2cda8a3700afaf5c50dbd1b380cc15c78e9bb88b42701297b7aefd5d5b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:34Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.557968 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.558000 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:34 crc 
kubenswrapper[4948]: I0122 14:19:34.558028 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.558042 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.558052 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:34Z","lastTransitionTime":"2026-01-22T14:19:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.659891 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.659935 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.659946 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.659964 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.659976 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:34Z","lastTransitionTime":"2026-01-22T14:19:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.763300 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.763357 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.763371 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.763384 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.763393 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:34Z","lastTransitionTime":"2026-01-22T14:19:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.865413 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.865461 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.865477 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.865495 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:34 crc kubenswrapper[4948]: I0122 14:19:34.865508 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:34Z","lastTransitionTime":"2026-01-22T14:19:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:34.920918 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 02:02:17.280115752 +0000 UTC Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:34.968081 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:34.968110 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:34.968119 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:34.968131 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:34.968139 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:34Z","lastTransitionTime":"2026-01-22T14:19:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.032648 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7"] Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.033076 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.034816 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.044303 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.045672 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controlle
r-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.056583 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.066753 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.072905 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.072958 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.072972 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.072991 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.073008 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:35Z","lastTransitionTime":"2026-01-22T14:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.078716 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j4g7x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d0ef450-a908-4e0f-b551-de0724320540\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77e0f7c92ba0b75f07179e4619dbbf9270727b13bd14348dec038239fef65af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mzcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j4g7x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.090738 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.102272 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.113452 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.127260 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad48d2cda8a3700afaf5c50dbd1b380cc15c78e9bb88b42701297b7aefd5d5b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.138591 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-conf
ig\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.150315 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.160814 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.171823 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-c
ni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.176154 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.176192 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.176210 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.176232 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.176248 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:35Z","lastTransitionTime":"2026-01-22T14:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.185025 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.192942 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqkg5\" (UniqueName: \"kubernetes.io/projected/90ece4aa-3874-4f6e-a88f-efcc2e745c10-kube-api-access-pqkg5\") pod \"ovnkube-control-plane-749d76644c-wxjm7\" (UID: \"90ece4aa-3874-4f6e-a88f-efcc2e745c10\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.193033 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/90ece4aa-3874-4f6e-a88f-efcc2e745c10-env-overrides\") pod \"ovnkube-control-plane-749d76644c-wxjm7\" (UID: \"90ece4aa-3874-4f6e-a88f-efcc2e745c10\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.193057 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/90ece4aa-3874-4f6e-a88f-efcc2e745c10-ovnkube-config\") pod 
\"ovnkube-control-plane-749d76644c-wxjm7\" (UID: \"90ece4aa-3874-4f6e-a88f-efcc2e745c10\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.193093 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/90ece4aa-3874-4f6e-a88f-efcc2e745c10-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-wxjm7\" (UID: \"90ece4aa-3874-4f6e-a88f-efcc2e745c10\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.202058 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1ddd883d66df032d348dd5c01bf01eec618e6ceab82f0b74d6411033d96419c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://724993b6da16942a70c289ef23b888e5c99acdb5e757981e26ff9cfb7ca074ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b08d2009af5c34e87963d42b418cb5bc2e4420bcae3ff157f76072a644659921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://174e918b3a7095dee796235feeb4d0ffffbfbba45bd3f647cb68a4980e921778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81d7b5a23cfc553e1a1c8a693b618499068a9bd28d8fde52df86091c290081e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a22632cd0bfb0bc2caa9fabdd5f4cbb6a7f4bb9c366fe06c7367d64584cb98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159bf57702dc28f8e98dc4e7680fa2ae8a1ca813
93fd615e28ea5d5b74c83b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://685302cd659be58f34d32bf114fb5388686a9b90bef5238e7eef374d1104265d\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:19:31Z\\\",\\\"message\\\":\\\"r 6\\\\nI0122 14:19:31.456517 6268 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 14:19:31.456541 6268 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 14:19:31.456555 6268 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 14:19:31.456564 6268 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 14:19:31.456574 6268 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0122 14:19:31.456916 6268 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 14:19:31.457159 6268 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 14:19:31.457262 6268 reflector.go:311] Stopping reflector *v1.NetworkPolicy (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 14:19:31.457394 6268 reflector.go:311] Stopping reflector *v1.Node (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI0122 14:19:31.457900 6268 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 14:19:31.457947 6268 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 14:19:31.457993 6268 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0122 14:19:31.458054 6268 factory.go:656] Stopping watch factory\\\\nI0122 14:19:31.458073 6268 ovnkube.go:599] Stopped ovnkube\\\\nI0122 14\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:29Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159bf57702dc28f8e98dc4e7680fa2ae8a1ca81393fd615e28ea5d5b74c83b9f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:19:33Z\\\",\\\"message\\\":\\\"3.144663 6392 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-controller-manager-operator/metrics]} name:Service_openshift-controller-manager-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.58:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4607c9b7-15f9-4ba0-86e5-0021ba7e4488}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 14:19:33.144693 6392 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/machine-config-daemon-ldzzt\\\\nI0122 14:19:33.144700 6392 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0122 14:19:33.144710 6392 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0122 14:19:33.144711 6392 obj_retry.go:303] Retry object 
setup: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0122 14:19:33.144717 6392 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in no\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:32Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67faca70b5ec9d02fe3cd9455cb1b95e9dce6fe764d354a3ce7b132ccfc315b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initConta
inerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.210864 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.210928 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.210947 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.210968 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.210983 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:35Z","lastTransitionTime":"2026-01-22T14:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.213935 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ece4aa-3874-4f6e-a88f-efcc2e745c10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wxjm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: E0122 14:19:35.221899 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.225578 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.225603 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.225612 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.225624 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.225633 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:35Z","lastTransitionTime":"2026-01-22T14:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:35 crc kubenswrapper[4948]: E0122 14:19:35.237666 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.241061 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.241089 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.241098 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.241109 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.241119 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:35Z","lastTransitionTime":"2026-01-22T14:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:35 crc kubenswrapper[4948]: E0122 14:19:35.251997 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.255919 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.255989 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.256005 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.256041 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.256054 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:35Z","lastTransitionTime":"2026-01-22T14:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:35 crc kubenswrapper[4948]: E0122 14:19:35.266732 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.269337 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.269376 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.269386 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.269400 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.269409 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:35Z","lastTransitionTime":"2026-01-22T14:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.276533 4948 scope.go:117] "RemoveContainer" containerID="159bf57702dc28f8e98dc4e7680fa2ae8a1ca81393fd615e28ea5d5b74c83b9f" Jan 22 14:19:35 crc kubenswrapper[4948]: E0122 14:19:35.276691 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-c5294_openshift-ovn-kubernetes(937749c1-3c7a-4f48-b65a-84b1335d64b6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" podUID="937749c1-3c7a-4f48-b65a-84b1335d64b6" Jan 22 14:19:35 crc kubenswrapper[4948]: E0122 14:19:35.282375 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: E0122 14:19:35.282504 4948 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.283902 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.283926 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.283938 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.283957 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.283968 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:35Z","lastTransitionTime":"2026-01-22T14:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.289504 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.293830 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/90ece4aa-3874-4f6e-a88f-efcc2e745c10-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-wxjm7\" (UID: \"90ece4aa-3874-4f6e-a88f-efcc2e745c10\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.293901 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pqkg5\" (UniqueName: \"kubernetes.io/projected/90ece4aa-3874-4f6e-a88f-efcc2e745c10-kube-api-access-pqkg5\") pod \"ovnkube-control-plane-749d76644c-wxjm7\" (UID: \"90ece4aa-3874-4f6e-a88f-efcc2e745c10\") " 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.293934 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/90ece4aa-3874-4f6e-a88f-efcc2e745c10-env-overrides\") pod \"ovnkube-control-plane-749d76644c-wxjm7\" (UID: \"90ece4aa-3874-4f6e-a88f-efcc2e745c10\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.293959 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/90ece4aa-3874-4f6e-a88f-efcc2e745c10-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-wxjm7\" (UID: \"90ece4aa-3874-4f6e-a88f-efcc2e745c10\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.294666 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/90ece4aa-3874-4f6e-a88f-efcc2e745c10-env-overrides\") pod \"ovnkube-control-plane-749d76644c-wxjm7\" (UID: \"90ece4aa-3874-4f6e-a88f-efcc2e745c10\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.294806 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/90ece4aa-3874-4f6e-a88f-efcc2e745c10-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-wxjm7\" (UID: \"90ece4aa-3874-4f6e-a88f-efcc2e745c10\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.299684 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/90ece4aa-3874-4f6e-a88f-efcc2e745c10-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-wxjm7\" (UID: \"90ece4aa-3874-4f6e-a88f-efcc2e745c10\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.306821 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.310439 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pqkg5\" (UniqueName: \"kubernetes.io/projected/90ece4aa-3874-4f6e-a88f-efcc2e745c10-kube-api-access-pqkg5\") pod \"ovnkube-control-plane-749d76644c-wxjm7\" (UID: \"90ece4aa-3874-4f6e-a88f-efcc2e745c10\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.320901 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.334037 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad48d2cda8a3700afaf5c50dbd1b380cc15c78e9bb88b42701297b7aefd5d5b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.343933 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.343919 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.355714 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.370955 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-c
ni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.384975 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d77325
7453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.386335 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.386382 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.386391 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.386404 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.386413 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:35Z","lastTransitionTime":"2026-01-22T14:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.401876 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1ddd883d66df032d348dd5c01bf01eec618e6ceab82f0b74d6411033d96419c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://724993b6da16942a70c289ef23b888e5c99acdb5e757981e26ff9cfb7ca074ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://b08d2009af5c34e87963d42b418cb5bc2e4420bcae3ff157f76072a644659921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://174e918b3a7095dee796235feeb4d0ffffbfbba45bd3f647cb68a4980e921778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81d7b5a23cfc553e1a1c8a693b618499068a9bd28d8fde52df86091c290081e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a22632cd0bfb0bc2caa9fabdd5f4cbb6a7f4bb9c366fe06c7367d64584cb98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159bf57702dc28f8e98dc4e7680fa2ae8a1ca81393fd615e28ea5d5b74c83b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159bf57702dc28f8e98dc4e7680fa2ae8a1ca81393fd615e28ea5d5b74c83b9f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:19:33Z\\\",\\\"message\\\":\\\"3.144663 6392 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-controller-manager-operator/metrics]} name:Service_openshift-controller-manager-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.58:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4607c9b7-15f9-4ba0-86e5-0021ba7e4488}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 14:19:33.144693 6392 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/machine-config-daemon-ldzzt\\\\nI0122 14:19:33.144700 6392 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0122 14:19:33.144710 6392 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0122 14:19:33.144711 6392 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0122 14:19:33.144717 6392 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in no\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:32Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-c5294_openshift-ovn-kubernetes(937749c1-3c7a-4f48-b65a-84b1335d64b6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67faca70b5ec9d02fe3cd9455cb1b95e9dce6fe764d354a3ce7b132ccfc315b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.414958 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ece4aa-3874-4f6e-a88f-efcc2e745c10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wxjm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.429535 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.440181 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.449640 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.457144 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.464855 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j4g7x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d0ef450-a908-4e0f-b551-de0724320540\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77e0f7c92ba0b75f07179e4619dbbf9270727b13bd14348dec038239fef65af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mzcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j4g7x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:35Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.488520 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.488556 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.488566 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.488582 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.488598 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:35Z","lastTransitionTime":"2026-01-22T14:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.590982 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.591029 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.591039 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.591053 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.591063 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:35Z","lastTransitionTime":"2026-01-22T14:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.694379 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.694416 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.694424 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.694438 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.694447 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:35Z","lastTransitionTime":"2026-01-22T14:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.797847 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.797879 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.797887 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.797901 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.797909 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:35Z","lastTransitionTime":"2026-01-22T14:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.900453 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.900497 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.900506 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.900521 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.900530 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:35Z","lastTransitionTime":"2026-01-22T14:19:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.921818 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 00:30:41.540057067 +0000 UTC Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.956353 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:19:35 crc kubenswrapper[4948]: E0122 14:19:35.956494 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.956865 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:19:35 crc kubenswrapper[4948]: E0122 14:19:35.956915 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:19:35 crc kubenswrapper[4948]: I0122 14:19:35.957059 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:35 crc kubenswrapper[4948]: E0122 14:19:35.957110 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.003687 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.003740 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.003755 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.003772 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.003785 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:36Z","lastTransitionTime":"2026-01-22T14:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.105894 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.105938 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.105949 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.105965 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.105981 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:36Z","lastTransitionTime":"2026-01-22T14:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.208836 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.208869 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.208879 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.208891 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.208899 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:36Z","lastTransitionTime":"2026-01-22T14:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.281832 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" event={"ID":"90ece4aa-3874-4f6e-a88f-efcc2e745c10","Type":"ContainerStarted","Data":"0c64427adb6c4aaa04dbab0d9163a10b0dfdc8a221fc8f157c7d5ee329331316"} Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.311588 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.311641 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.311658 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.311681 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.311698 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:36Z","lastTransitionTime":"2026-01-22T14:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.415080 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.415138 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.415155 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.415184 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.415203 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:36Z","lastTransitionTime":"2026-01-22T14:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.519379 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.519432 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.519449 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.519470 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.519488 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:36Z","lastTransitionTime":"2026-01-22T14:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.623105 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.623149 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.623165 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.623185 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.623200 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:36Z","lastTransitionTime":"2026-01-22T14:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.726536 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.726597 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.726615 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.726644 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.726664 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:36Z","lastTransitionTime":"2026-01-22T14:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.830117 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.830186 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.830204 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.830230 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.830249 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:36Z","lastTransitionTime":"2026-01-22T14:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.905762 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-fxccj"] Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.906629 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:19:36 crc kubenswrapper[4948]: E0122 14:19:36.906759 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.922088 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-08 14:12:37.339197453 +0000 UTC Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.924081 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ece4aa-3874-4f6e-a88f-efcc2e745c10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:35Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wxjm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:36Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.933381 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.933466 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.933545 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.933583 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.933610 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:36Z","lastTransitionTime":"2026-01-22T14:19:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.942390 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fxccj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1280bb23-c283-4970-a78b-6de7854469ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:36Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fxccj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:36Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.961616 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:36Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.984970 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1ddd883d66df032d348dd5c01bf01eec618e6ceab82f0b74d6411033d96419c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://724993b6da16942a70c289ef23b888e5c99acdb5e757981e26ff9cfb7ca074ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b08d2009af5c34e87963d42b418cb5bc2e4420bcae3ff157f76072a644659921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://174e918b3a7095dee796235feeb4d0ffffbfbba45bd3f647cb68a4980e921778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81d7b5a23cfc553e1a1c8a693b618499068a9bd28d8fde52df86091c290081e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a22632cd0bfb0bc2caa9fabdd5f4cbb6a7f4bb9c366fe06c7367d64584cb98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159bf57702dc28f8e98dc4e7680fa2ae8a1ca813
93fd615e28ea5d5b74c83b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159bf57702dc28f8e98dc4e7680fa2ae8a1ca81393fd615e28ea5d5b74c83b9f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:19:33Z\\\",\\\"message\\\":\\\"3.144663 6392 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-controller-manager-operator/metrics]} name:Service_openshift-controller-manager-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.58:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4607c9b7-15f9-4ba0-86e5-0021ba7e4488}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 14:19:33.144693 6392 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/machine-config-daemon-ldzzt\\\\nI0122 14:19:33.144700 6392 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0122 14:19:33.144710 6392 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0122 14:19:33.144711 6392 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0122 14:19:33.144717 6392 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in no\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:32Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-c5294_openshift-ovn-kubernetes(937749c1-3c7a-4f48-b65a-84b1335d64b6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67faca70b5ec9d02fe3cd9455cb1b95e9dce6fe764d354a3ce7b132ccfc315b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:36Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:36 crc kubenswrapper[4948]: I0122 14:19:36.998859 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed 
to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:36Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.011616 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1280bb23-c283-4970-a78b-6de7854469ac-metrics-certs\") pod \"network-metrics-daemon-fxccj\" (UID: \"1280bb23-c283-4970-a78b-6de7854469ac\") " pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.011676 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-29g7q\" (UniqueName: \"kubernetes.io/projected/1280bb23-c283-4970-a78b-6de7854469ac-kube-api-access-29g7q\") pod \"network-metrics-daemon-fxccj\" (UID: \"1280bb23-c283-4970-a78b-6de7854469ac\") " pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.012889 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.025365 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j4g7x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d0ef450-a908-4e0f-b551-de0724320540\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77e0f7c92ba0b75f07179e4619dbbf9270727b13bd14348dec038239fef65af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mzcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j4g7x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.036636 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.036668 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.036684 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.036697 4948 kubelet_node_status.go:724] "Recording 
event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.036706 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:37Z","lastTransitionTime":"2026-01-22T14:19:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.039872 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",
\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.052568 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The 
container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.063884 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.079484 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad48d2cda8a3700afaf5c50dbd1b380cc15c78e9bb88b42701297b7aefd5d5b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.090958 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.106866 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.112498 4948 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1280bb23-c283-4970-a78b-6de7854469ac-metrics-certs\") pod \"network-metrics-daemon-fxccj\" (UID: \"1280bb23-c283-4970-a78b-6de7854469ac\") " pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.112576 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-29g7q\" (UniqueName: \"kubernetes.io/projected/1280bb23-c283-4970-a78b-6de7854469ac-kube-api-access-29g7q\") pod \"network-metrics-daemon-fxccj\" (UID: \"1280bb23-c283-4970-a78b-6de7854469ac\") " pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:19:37 crc kubenswrapper[4948]: E0122 14:19:37.112842 4948 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:19:37 crc kubenswrapper[4948]: E0122 14:19:37.112960 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1280bb23-c283-4970-a78b-6de7854469ac-metrics-certs podName:1280bb23-c283-4970-a78b-6de7854469ac nodeName:}" failed. No retries permitted until 2026-01-22 14:19:37.612939384 +0000 UTC m=+35.942612313 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1280bb23-c283-4970-a78b-6de7854469ac-metrics-certs") pod "network-metrics-daemon-fxccj" (UID: "1280bb23-c283-4970-a78b-6de7854469ac") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.118087 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"
name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.127753 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-29g7q\" (UniqueName: \"kubernetes.io/projected/1280bb23-c283-4970-a78b-6de7854469ac-kube-api-access-29g7q\") pod \"network-metrics-daemon-fxccj\" (UID: \"1280bb23-c283-4970-a78b-6de7854469ac\") " pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.131790 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.138194 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.138217 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.138227 4948 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.138241 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.138252 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:37Z","lastTransitionTime":"2026-01-22T14:19:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.142544 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.240657 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.240697 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.240705 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.240719 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.240728 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:37Z","lastTransitionTime":"2026-01-22T14:19:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.288464 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" event={"ID":"90ece4aa-3874-4f6e-a88f-efcc2e745c10","Type":"ContainerStarted","Data":"5175067f2a33555ac08c94572fbffc6c7b5dd06b33356c338a77492e859e5b79"} Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.288502 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" event={"ID":"90ece4aa-3874-4f6e-a88f-efcc2e745c10","Type":"ContainerStarted","Data":"394b37292a9478d6e14f7cbb503ad5c350bec50720d2e6cfcb9fd2bf5640bd30"} Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.289695 4948 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5294_937749c1-3c7a-4f48-b65a-84b1335d64b6/ovnkube-controller/1.log" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.298826 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j4g7x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d0ef450-a908-4e0f-b551-de0724320540\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77e0f7c92ba0b75f07179e4619dbbf9270727b13bd14348dec038239fef65af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mzcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j4g7x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.311532 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\
\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.323423 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.333971 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.342413 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.342450 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.342461 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.342476 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.342486 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:37Z","lastTransitionTime":"2026-01-22T14:19:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.346431 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-
apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.358435 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.368934 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.383122 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad48d2cda8a3700afaf5c50dbd1b380cc15c78e9bb88b42701297b7aefd5d5b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.395364 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-conf
ig\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.408520 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.418804 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.432172 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-c
ni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.444448 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.444505 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.444517 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.444536 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.444549 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:37Z","lastTransitionTime":"2026-01-22T14:19:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.447251 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.476295 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1ddd883d66df032d348dd5c01bf01eec618e6ceab82f0b74d6411033d96419c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://724993b6da16942a70c289ef23b888e5c99acdb5e757981e26ff9cfb7ca074ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b08d2009af5c34e87963d42b418cb5bc2e4420bcae3ff157f76072a644659921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://174e918b3a7095dee796235feeb4d0ffffbfbba45bd3f647cb68a4980e921778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81d7b5a23cfc553e1a1c8a693b618499068a9bd28d8fde52df86091c290081e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a22632cd0bfb0bc2caa9fabdd5f4cbb6a7f4bb9c366fe06c7367d64584cb98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159bf57702dc28f8e98dc4e7680fa2ae8a1ca81393fd615e28ea5d5b74c83b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159bf57702dc28f8e98dc4e7680fa2ae8a1ca81393fd615e28ea5d5b74c83b9f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:19:33Z\\\",\\\"message\\\":\\\"3.144663 6392 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-controller-manager-operator/metrics]} name:Service_openshift-controller-manager-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.58:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4607c9b7-15f9-4ba0-86e5-0021ba7e4488}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 14:19:33.144693 6392 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/machine-config-daemon-ldzzt\\\\nI0122 14:19:33.144700 6392 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0122 14:19:33.144710 6392 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0122 14:19:33.144711 6392 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0122 14:19:33.144717 6392 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in no\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:32Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-c5294_openshift-ovn-kubernetes(937749c1-3c7a-4f48-b65a-84b1335d64b6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67faca70b5ec9d02fe3cd9455cb1b95e9dce6fe764d354a3ce7b132ccfc315b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.499802 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ece4aa-3874-4f6e-a88f-efcc2e745c10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://394b37292a9478d6e14f7cbb503ad5c350bec50720d2e6cfcb9fd2bf5640bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5175067f2a33555ac08c94572fbffc6c7b5dd06b33356c338a77492e859e5b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wxjm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.513230 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fxccj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1280bb23-c283-4970-a78b-6de7854469ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:36Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fxccj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:37Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.546898 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.546935 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.546947 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.546963 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.546974 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:37Z","lastTransitionTime":"2026-01-22T14:19:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.618634 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1280bb23-c283-4970-a78b-6de7854469ac-metrics-certs\") pod \"network-metrics-daemon-fxccj\" (UID: \"1280bb23-c283-4970-a78b-6de7854469ac\") " pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:19:37 crc kubenswrapper[4948]: E0122 14:19:37.618794 4948 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:19:37 crc kubenswrapper[4948]: E0122 14:19:37.618846 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1280bb23-c283-4970-a78b-6de7854469ac-metrics-certs podName:1280bb23-c283-4970-a78b-6de7854469ac nodeName:}" failed. No retries permitted until 2026-01-22 14:19:38.618833258 +0000 UTC m=+36.948506187 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1280bb23-c283-4970-a78b-6de7854469ac-metrics-certs") pod "network-metrics-daemon-fxccj" (UID: "1280bb23-c283-4970-a78b-6de7854469ac") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.650400 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.650460 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.650481 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.650508 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.650529 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:37Z","lastTransitionTime":"2026-01-22T14:19:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.719637 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:19:37 crc kubenswrapper[4948]: E0122 14:19:37.719876 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:19:53.719858998 +0000 UTC m=+52.049531917 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.752949 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.752990 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.753004 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.753051 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.753068 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:37Z","lastTransitionTime":"2026-01-22T14:19:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.821047 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.821099 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.821128 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.821153 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:37 crc kubenswrapper[4948]: E0122 14:19:37.821194 4948 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object 
"openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:19:37 crc kubenswrapper[4948]: E0122 14:19:37.821270 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 14:19:53.82125369 +0000 UTC m=+52.150926619 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:19:37 crc kubenswrapper[4948]: E0122 14:19:37.821289 4948 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:19:37 crc kubenswrapper[4948]: E0122 14:19:37.821344 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 14:19:53.821326301 +0000 UTC m=+52.150999300 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:19:37 crc kubenswrapper[4948]: E0122 14:19:37.821409 4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:19:37 crc kubenswrapper[4948]: E0122 14:19:37.821422 4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 14:19:37 crc kubenswrapper[4948]: E0122 14:19:37.821434 4948 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:19:37 crc kubenswrapper[4948]: E0122 14:19:37.821464 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 14:19:53.821452375 +0000 UTC m=+52.151125394 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:19:37 crc kubenswrapper[4948]: E0122 14:19:37.821514 4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:19:37 crc kubenswrapper[4948]: E0122 14:19:37.821527 4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 14:19:37 crc kubenswrapper[4948]: E0122 14:19:37.821536 4948 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:19:37 crc kubenswrapper[4948]: E0122 14:19:37.821559 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 14:19:53.821551948 +0000 UTC m=+52.151224957 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.855078 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.855111 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.855119 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.855131 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.855142 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:37Z","lastTransitionTime":"2026-01-22T14:19:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.922733 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 18:00:21.185619645 +0000 UTC Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.956166 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:37 crc kubenswrapper[4948]: E0122 14:19:37.956292 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.956358 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.956361 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:19:37 crc kubenswrapper[4948]: E0122 14:19:37.956490 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:19:37 crc kubenswrapper[4948]: E0122 14:19:37.956647 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.957170 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.957199 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.957211 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.957227 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:37 crc kubenswrapper[4948]: I0122 14:19:37.957238 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:37Z","lastTransitionTime":"2026-01-22T14:19:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.059790 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.059824 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.059834 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.059848 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.059858 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:38Z","lastTransitionTime":"2026-01-22T14:19:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.162957 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.163036 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.163048 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.163065 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.163074 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:38Z","lastTransitionTime":"2026-01-22T14:19:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.266196 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.266260 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.266277 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.266297 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.266316 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:38Z","lastTransitionTime":"2026-01-22T14:19:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.369237 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.369276 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.369285 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.369300 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.369310 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:38Z","lastTransitionTime":"2026-01-22T14:19:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.471125 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.471163 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.471172 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.471184 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.471193 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:38Z","lastTransitionTime":"2026-01-22T14:19:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.573731 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.573766 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.573775 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.573791 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.573804 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:38Z","lastTransitionTime":"2026-01-22T14:19:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.630596 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1280bb23-c283-4970-a78b-6de7854469ac-metrics-certs\") pod \"network-metrics-daemon-fxccj\" (UID: \"1280bb23-c283-4970-a78b-6de7854469ac\") " pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:19:38 crc kubenswrapper[4948]: E0122 14:19:38.630871 4948 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:19:38 crc kubenswrapper[4948]: E0122 14:19:38.630978 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1280bb23-c283-4970-a78b-6de7854469ac-metrics-certs podName:1280bb23-c283-4970-a78b-6de7854469ac nodeName:}" failed. No retries permitted until 2026-01-22 14:19:40.630951665 +0000 UTC m=+38.960624684 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1280bb23-c283-4970-a78b-6de7854469ac-metrics-certs") pod "network-metrics-daemon-fxccj" (UID: "1280bb23-c283-4970-a78b-6de7854469ac") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.676519 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.676561 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.676571 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.676585 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.676594 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:38Z","lastTransitionTime":"2026-01-22T14:19:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.779810 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.779856 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.779865 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.779883 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.779892 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:38Z","lastTransitionTime":"2026-01-22T14:19:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.882821 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.883295 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.883330 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.883360 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.883380 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:38Z","lastTransitionTime":"2026-01-22T14:19:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.923075 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 05:33:15.13795873 +0000 UTC Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.957168 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:19:38 crc kubenswrapper[4948]: E0122 14:19:38.957415 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.986668 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.986712 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.986724 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.986742 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:38 crc kubenswrapper[4948]: I0122 14:19:38.986753 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:38Z","lastTransitionTime":"2026-01-22T14:19:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.090157 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.090198 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.090207 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.090222 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.090232 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:39Z","lastTransitionTime":"2026-01-22T14:19:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.192643 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.192700 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.192744 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.192762 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.192775 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:39Z","lastTransitionTime":"2026-01-22T14:19:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.296581 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.296638 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.296649 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.296669 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.296681 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:39Z","lastTransitionTime":"2026-01-22T14:19:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.398497 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.398537 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.398552 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.398568 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.398579 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:39Z","lastTransitionTime":"2026-01-22T14:19:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.452144 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.470352 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.489031 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.501532 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.501563 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.501576 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.501591 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.501603 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:39Z","lastTransitionTime":"2026-01-22T14:19:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.512687 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad48d2cda8a3700afaf5c50dbd1b380cc15c78e9bb88b42701297b7aefd5d5b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.526343 4948 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\
\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.541454 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.561154 4948 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.575516 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2026-01-22T14:19:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.596743 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.603773 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.603802 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.603812 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.603829 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.603840 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:39Z","lastTransitionTime":"2026-01-22T14:19:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin 
returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.610395 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ece4aa-3874-4f6e-a88f-efcc2e745c10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://394b37292a9478d6e14f7cbb503ad5c350bec50720d2e6cfcb9fd2bf5640bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5175067f2a33555ac08c94572fbffc6c7b5dd06b33356c338a77492e859e5b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:35Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wxjm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.621943 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fxccj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1280bb23-c283-4970-a78b-6de7854469ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:36Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fxccj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-22T14:19:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.636306 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.654488 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1ddd883d66df032d348dd5c01bf01eec618e6ceab82f0b74d6411033d96419c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://724993b6da16942a70c289ef23b888e5c99acdb5e757981e26ff9cfb7ca074ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b08d2009af5c34e87963d42b418cb5bc2e4420bcae3ff157f76072a644659921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://174e918b3a7095dee796235feeb4d0ffffbfbba45bd3f647cb68a4980e921778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81d7b5a23cfc553e1a1c8a693b618499068a9bd28d8fde52df86091c290081e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a22632cd0bfb0bc2caa9fabdd5f4cbb6a7f4bb9c366fe06c7367d64584cb98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159bf57702dc28f8e98dc4e7680fa2ae8a1ca81393fd615e28ea5d5b74c83b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159bf57702dc28f8e98dc4e7680fa2ae8a1ca81393fd615e28ea5d5b74c83b9f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:19:33Z\\\",\\\"message\\\":\\\"3.144663 6392 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-controller-manager-operator/metrics]} name:Service_openshift-controller-manager-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.58:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4607c9b7-15f9-4ba0-86e5-0021ba7e4488}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 14:19:33.144693 6392 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/machine-config-daemon-ldzzt\\\\nI0122 14:19:33.144700 6392 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0122 14:19:33.144710 6392 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0122 14:19:33.144711 6392 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0122 14:19:33.144717 6392 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in no\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:32Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-c5294_openshift-ovn-kubernetes(937749c1-3c7a-4f48-b65a-84b1335d64b6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67faca70b5ec9d02fe3cd9455cb1b95e9dce6fe764d354a3ce7b132ccfc315b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.671262 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed 
to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.683518 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.699155 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j4g7x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d0ef450-a908-4e0f-b551-de0724320540\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77e0f7c92ba0b75f07179e4619dbbf9270727b13bd14348dec038239fef65af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mzcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j4g7x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.706417 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.706470 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.706485 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.706506 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.706526 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:39Z","lastTransitionTime":"2026-01-22T14:19:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.717627 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:39Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.809208 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.809260 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.809284 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.809304 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.809321 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:39Z","lastTransitionTime":"2026-01-22T14:19:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.911878 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.911928 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.911937 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.911951 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.911962 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:39Z","lastTransitionTime":"2026-01-22T14:19:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.924264 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 14:01:56.464159137 +0000 UTC Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.957059 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.957138 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:19:39 crc kubenswrapper[4948]: I0122 14:19:39.957183 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:19:39 crc kubenswrapper[4948]: E0122 14:19:39.957257 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:19:39 crc kubenswrapper[4948]: E0122 14:19:39.957338 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:19:39 crc kubenswrapper[4948]: E0122 14:19:39.957906 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.015842 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.015890 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.015902 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.015921 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.015933 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:40Z","lastTransitionTime":"2026-01-22T14:19:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.118960 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.119101 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.119129 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.119164 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.119185 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:40Z","lastTransitionTime":"2026-01-22T14:19:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.222927 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.222990 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.223005 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.223052 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.223070 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:40Z","lastTransitionTime":"2026-01-22T14:19:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.326860 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.326912 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.326921 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.326940 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.326951 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:40Z","lastTransitionTime":"2026-01-22T14:19:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.429472 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.429542 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.429554 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.429572 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.429589 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:40Z","lastTransitionTime":"2026-01-22T14:19:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.532419 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.532489 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.532510 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.532535 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.532551 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:40Z","lastTransitionTime":"2026-01-22T14:19:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.636326 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.636911 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.636935 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.636963 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.636977 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:40Z","lastTransitionTime":"2026-01-22T14:19:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.652834 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1280bb23-c283-4970-a78b-6de7854469ac-metrics-certs\") pod \"network-metrics-daemon-fxccj\" (UID: \"1280bb23-c283-4970-a78b-6de7854469ac\") " pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:19:40 crc kubenswrapper[4948]: E0122 14:19:40.652995 4948 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:19:40 crc kubenswrapper[4948]: E0122 14:19:40.653079 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1280bb23-c283-4970-a78b-6de7854469ac-metrics-certs podName:1280bb23-c283-4970-a78b-6de7854469ac nodeName:}" failed. No retries permitted until 2026-01-22 14:19:44.653064519 +0000 UTC m=+42.982737448 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1280bb23-c283-4970-a78b-6de7854469ac-metrics-certs") pod "network-metrics-daemon-fxccj" (UID: "1280bb23-c283-4970-a78b-6de7854469ac") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.740472 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.740545 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.740567 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.740594 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.740612 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:40Z","lastTransitionTime":"2026-01-22T14:19:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.843232 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.843290 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.843330 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.843347 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.843361 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:40Z","lastTransitionTime":"2026-01-22T14:19:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.924851 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 00:51:46.269735844 +0000 UTC Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.947702 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.947752 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.947768 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.947790 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.947807 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:40Z","lastTransitionTime":"2026-01-22T14:19:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:40 crc kubenswrapper[4948]: I0122 14:19:40.956812 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:19:40 crc kubenswrapper[4948]: E0122 14:19:40.956992 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.050694 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.050736 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.050747 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.050762 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.050774 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:41Z","lastTransitionTime":"2026-01-22T14:19:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.153901 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.153996 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.154051 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.154083 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.154106 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:41Z","lastTransitionTime":"2026-01-22T14:19:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.256801 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.256834 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.256842 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.256854 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.256863 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:41Z","lastTransitionTime":"2026-01-22T14:19:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.359752 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.359821 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.359838 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.359863 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.359880 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:41Z","lastTransitionTime":"2026-01-22T14:19:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.463675 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.463735 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.463756 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.463784 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.463806 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:41Z","lastTransitionTime":"2026-01-22T14:19:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.566268 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.566306 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.566317 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.566334 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.566345 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:41Z","lastTransitionTime":"2026-01-22T14:19:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.668815 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.668921 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.668932 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.668944 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.668954 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:41Z","lastTransitionTime":"2026-01-22T14:19:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.771597 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.771643 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.771653 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.771669 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.771680 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:41Z","lastTransitionTime":"2026-01-22T14:19:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.874461 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.874508 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.874520 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.874538 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.874551 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:41Z","lastTransitionTime":"2026-01-22T14:19:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.925298 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 07:51:29.420069244 +0000 UTC Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.956777 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.956815 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.957050 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:19:41 crc kubenswrapper[4948]: E0122 14:19:41.957057 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:19:41 crc kubenswrapper[4948]: E0122 14:19:41.957129 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:19:41 crc kubenswrapper[4948]: E0122 14:19:41.957185 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.972649 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a
6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" 
enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or 
is not yet valid: current time 2026-01-22T14:19:41Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.980999 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.981076 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.981102 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.981127 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.981143 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:41Z","lastTransitionTime":"2026-01-22T14:19:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:41 crc kubenswrapper[4948]: I0122 14:19:41.995233 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:41Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.007698 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:42Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.022582 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad48d2cda8a3700afaf5c50dbd1b380cc15c78e9bb88b42701297b7aefd5d5b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:42Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.033895 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-conf
ig\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:42Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.046145 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:42Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.057930 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:42Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.073582 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-c
ni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:42Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.083443 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.083482 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.083491 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.083507 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.083517 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:42Z","lastTransitionTime":"2026-01-22T14:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.087710 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:42Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.116266 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1ddd883d66df032d348dd5c01bf01eec618e6ceab82f0b74d6411033d96419c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://724993b6da16942a70c289ef23b888e5c99acdb5e757981e26ff9cfb7ca074ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b08d2009af5c34e87963d42b418cb5bc2e4420bcae3ff157f76072a644659921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://174e918b3a7095dee796235feeb4d0ffffbfbba45bd3f647cb68a4980e921778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81d7b5a23cfc553e1a1c8a693b618499068a9bd28d8fde52df86091c290081e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a22632cd0bfb0bc2caa9fabdd5f4cbb6a7f4bb9c366fe06c7367d64584cb98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://159bf57702dc28f8e98dc4e7680fa2ae8a1ca81393fd615e28ea5d5b74c83b9f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159bf57702dc28f8e98dc4e7680fa2ae8a1ca81393fd615e28ea5d5b74c83b9f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:19:33Z\\\",\\\"message\\\":\\\"3.144663 6392 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-controller-manager-operator/metrics]} name:Service_openshift-controller-manager-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.58:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4607c9b7-15f9-4ba0-86e5-0021ba7e4488}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 14:19:33.144693 6392 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/machine-config-daemon-ldzzt\\\\nI0122 14:19:33.144700 6392 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0122 14:19:33.144710 6392 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0122 14:19:33.144711 6392 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0122 14:19:33.144717 6392 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in no\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:32Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-c5294_openshift-ovn-kubernetes(937749c1-3c7a-4f48-b65a-84b1335d64b6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67faca70b5ec9d02fe3cd9455cb1b95e9dce6fe764d354a3ce7b132ccfc315b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:42Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.133248 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ece4aa-3874-4f6e-a88f-efcc2e745c10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://394b37292a9478d6e14f7cbb503ad5c350bec50720d2e6cfcb9fd2bf5640bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5175067f2a33555ac08c94572fbffc6c7b5dd06b33356c338a77492e859e5b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wxjm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:42Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.146725 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fxccj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1280bb23-c283-4970-a78b-6de7854469ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:36Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fxccj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:42Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.163466 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:42Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.178621 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-22T14:19:42Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.186153 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.186197 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.186211 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.186227 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.186241 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:42Z","lastTransitionTime":"2026-01-22T14:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.189730 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14
:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:42Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.200111 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j4g7x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d0ef450-a908-4e0f-b551-de0724320540\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77e0f7c92ba0b75f07179e4619dbbf9270727b13bd14348dec038239fef65af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mzcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j4g7x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:42Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.288130 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.288715 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.288800 4948 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.288875 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.288943 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:42Z","lastTransitionTime":"2026-01-22T14:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.392744 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.392788 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.392806 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.392830 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.392848 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:42Z","lastTransitionTime":"2026-01-22T14:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.495960 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.495992 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.496000 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.496026 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.496035 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:42Z","lastTransitionTime":"2026-01-22T14:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.597717 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.597794 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.597818 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.597841 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.597860 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:42Z","lastTransitionTime":"2026-01-22T14:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.700573 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.700666 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.700684 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.701160 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.701212 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:42Z","lastTransitionTime":"2026-01-22T14:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.805169 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.805222 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.805236 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.805255 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.805269 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:42Z","lastTransitionTime":"2026-01-22T14:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.907986 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.908046 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.908057 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.908076 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.908087 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:42Z","lastTransitionTime":"2026-01-22T14:19:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.926200 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 10:01:18.553865887 +0000 UTC Jan 22 14:19:42 crc kubenswrapper[4948]: I0122 14:19:42.956390 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:19:42 crc kubenswrapper[4948]: E0122 14:19:42.956535 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.010936 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.010995 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.011045 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.011071 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.011091 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:43Z","lastTransitionTime":"2026-01-22T14:19:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.114228 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.114284 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.114300 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.114322 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.114337 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:43Z","lastTransitionTime":"2026-01-22T14:19:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.221059 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.221240 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.221334 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.221367 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.221439 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:43Z","lastTransitionTime":"2026-01-22T14:19:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.324490 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.324541 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.324553 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.324569 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.324581 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:43Z","lastTransitionTime":"2026-01-22T14:19:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.427902 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.427954 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.427966 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.428000 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.428025 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:43Z","lastTransitionTime":"2026-01-22T14:19:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.530567 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.530609 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.530618 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.530633 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.530643 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:43Z","lastTransitionTime":"2026-01-22T14:19:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.633089 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.633129 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.633138 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.633151 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.633162 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:43Z","lastTransitionTime":"2026-01-22T14:19:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.735763 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.735807 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.735819 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.735838 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.735850 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:43Z","lastTransitionTime":"2026-01-22T14:19:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.838307 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.838375 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.838400 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.838425 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.838444 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:43Z","lastTransitionTime":"2026-01-22T14:19:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.927091 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 19:14:24.700027879 +0000 UTC Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.941268 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.941313 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.941324 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.941343 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.941354 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:43Z","lastTransitionTime":"2026-01-22T14:19:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.956913 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.957183 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:43 crc kubenswrapper[4948]: I0122 14:19:43.956986 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:19:43 crc kubenswrapper[4948]: E0122 14:19:43.957209 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:19:43 crc kubenswrapper[4948]: E0122 14:19:43.957397 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:19:43 crc kubenswrapper[4948]: E0122 14:19:43.957466 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.044511 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.044555 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.044567 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.044588 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.044601 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:44Z","lastTransitionTime":"2026-01-22T14:19:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.147751 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.147792 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.147806 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.147823 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.147836 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:44Z","lastTransitionTime":"2026-01-22T14:19:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.249725 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.249957 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.250100 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.250199 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.250285 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:44Z","lastTransitionTime":"2026-01-22T14:19:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.308060 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.309341 4948 scope.go:117] "RemoveContainer" containerID="159bf57702dc28f8e98dc4e7680fa2ae8a1ca81393fd615e28ea5d5b74c83b9f" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.353139 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.353199 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.353216 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.353238 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.353256 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:44Z","lastTransitionTime":"2026-01-22T14:19:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.456123 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.456160 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.456172 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.456194 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.456206 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:44Z","lastTransitionTime":"2026-01-22T14:19:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.558947 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.559007 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.559051 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.559072 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.559084 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:44Z","lastTransitionTime":"2026-01-22T14:19:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.661519 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.661561 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.661574 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.661590 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.661600 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:44Z","lastTransitionTime":"2026-01-22T14:19:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.695133 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1280bb23-c283-4970-a78b-6de7854469ac-metrics-certs\") pod \"network-metrics-daemon-fxccj\" (UID: \"1280bb23-c283-4970-a78b-6de7854469ac\") " pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:19:44 crc kubenswrapper[4948]: E0122 14:19:44.695372 4948 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:19:44 crc kubenswrapper[4948]: E0122 14:19:44.695476 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1280bb23-c283-4970-a78b-6de7854469ac-metrics-certs podName:1280bb23-c283-4970-a78b-6de7854469ac nodeName:}" failed. No retries permitted until 2026-01-22 14:19:52.695453429 +0000 UTC m=+51.025126368 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1280bb23-c283-4970-a78b-6de7854469ac-metrics-certs") pod "network-metrics-daemon-fxccj" (UID: "1280bb23-c283-4970-a78b-6de7854469ac") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.763817 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.763858 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.763869 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.763885 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.763896 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:44Z","lastTransitionTime":"2026-01-22T14:19:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.868294 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.868343 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.868357 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.868374 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.868385 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:44Z","lastTransitionTime":"2026-01-22T14:19:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.927583 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 02:30:12.165825006 +0000 UTC Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.956524 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:19:44 crc kubenswrapper[4948]: E0122 14:19:44.956721 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.971432 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.971490 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.971506 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.971564 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:44 crc kubenswrapper[4948]: I0122 14:19:44.971622 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:44Z","lastTransitionTime":"2026-01-22T14:19:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.074294 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.074347 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.074365 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.074391 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.074409 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:45Z","lastTransitionTime":"2026-01-22T14:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.176921 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.176961 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.176971 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.176988 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.177026 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:45Z","lastTransitionTime":"2026-01-22T14:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.279740 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.279785 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.279797 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.279815 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.279828 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:45Z","lastTransitionTime":"2026-01-22T14:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.327164 4948 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5294_937749c1-3c7a-4f48-b65a-84b1335d64b6/ovnkube-controller/1.log" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.334111 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" event={"ID":"937749c1-3c7a-4f48-b65a-84b1335d64b6","Type":"ContainerStarted","Data":"b68a112569f8d8a256713c4ca638606ac4fa5bbce2e3e8eb554ee755cbc98aa0"} Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.334827 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.351171 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f
6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for 
client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.367807 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.380368 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.382590 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.382643 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.382659 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.382682 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.382701 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:45Z","lastTransitionTime":"2026-01-22T14:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.399208 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad48d2cda8a3700afaf5c50dbd1b380cc15c78e9bb88b42701297b7aefd5d5b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.412862 4948 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.425680 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.437505 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.448543 4948 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.461733 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.466086 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.466150 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.466164 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.466180 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.466216 4948 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:45Z","lastTransitionTime":"2026-01-22T14:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:45 crc kubenswrapper[4948]: E0122 14:19:45.480829 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.480849 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1ddd883d66df032d348dd5c01bf01eec618e6ceab82f0b74d6411033d96419c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://724993b6da16942a70c289ef23b888e5c99acdb5e757981e26ff9cfb7ca074ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b08d2009af5c34e87963d42b418cb5bc2e4420bcae3ff157f76072a644659921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://174e918b3a7095dee796235feeb4d0ffffbfbba45bd3f647cb68a4980e921778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81d7b5a23cfc553e1a1c8a693b618499068a9bd28d8fde52df86091c290081e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a22632cd0bfb0bc2caa9fabdd5f4cbb6a7f4bb9c366fe06c7367d64584cb98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b68a112569f8d8a256713c4ca638606ac4fa5bbce2e3e8eb554ee755cbc98aa0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159bf57702dc28f8e98dc4e7680fa2ae8a1ca81393fd615e28ea5d5b74c83b9f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:19:33Z\\\",\\\"message\\\":\\\"3.144663 6392 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-controller-manager-operator/metrics]} name:Service_openshift-controller-manager-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.58:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4607c9b7-15f9-4ba0-86e5-0021ba7e4488}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 14:19:33.144693 6392 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/machine-config-daemon-ldzzt\\\\nI0122 14:19:33.144700 6392 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0122 14:19:33.144710 6392 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0122 14:19:33.144711 6392 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0122 14:19:33.144717 6392 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in 
no\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:32Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67faca70b5ec9d02fe3cd9455cb1b95e9dce6fe764d354a3ce7b132ccfc315b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.484538 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.484581 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.484596 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.484614 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.484626 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:45Z","lastTransitionTime":"2026-01-22T14:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.496946 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ece4aa-3874-4f6e-a88f-efcc2e745c10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://394b37292a9478d6e14f7cbb503ad5c350bec50720d2e6cfcb9fd2bf5640bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5175067f2a33555ac08c94572fbffc6c7b5dd06b33356c338a77492e859e5b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wxjm7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:45 crc kubenswrapper[4948]: E0122 14:19:45.498046 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.500852 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.500879 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.500889 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.500903 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.500913 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:45Z","lastTransitionTime":"2026-01-22T14:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.507688 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fxccj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1280bb23-c283-4970-a78b-6de7854469ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:36Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fxccj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:45 crc kubenswrapper[4948]: E0122 14:19:45.515492 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056
b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951
},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-22T14:19:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.519166 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-re
sources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.519644 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.519727 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.519802 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.519868 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.519922 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:45Z","lastTransitionTime":"2026-01-22T14:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:45 crc kubenswrapper[4948]: E0122 14:19:45.531003 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.533741 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.535668 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.535714 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.535729 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.535753 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.535766 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:45Z","lastTransitionTime":"2026-01-22T14:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.545884 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:45 crc kubenswrapper[4948]: E0122 14:19:45.553268 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:45 crc kubenswrapper[4948]: E0122 14:19:45.553377 4948 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.555884 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.555938 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.555948 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.555983 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.556097 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:45Z","lastTransitionTime":"2026-01-22T14:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.556898 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j4g7x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d0ef450-a908-4e0f-b551-de0724320540\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77e0f7c92ba0b75f07179e4619dbbf9270727b13bd14348dec038239fef65af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mzcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j4g7x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:45Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.659363 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.659405 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.659415 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.659429 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.659439 4948 setters.go:603] "Node became not 
ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:45Z","lastTransitionTime":"2026-01-22T14:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.761487 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.761558 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.761570 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.761591 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.761609 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:45Z","lastTransitionTime":"2026-01-22T14:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.864755 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.864807 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.864818 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.864834 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.864846 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:45Z","lastTransitionTime":"2026-01-22T14:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.928555 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 22:35:33.690570106 +0000 UTC Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.956452 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.956492 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.956552 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:19:45 crc kubenswrapper[4948]: E0122 14:19:45.956595 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:19:45 crc kubenswrapper[4948]: E0122 14:19:45.956656 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:19:45 crc kubenswrapper[4948]: E0122 14:19:45.956753 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.967110 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.967174 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.967192 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.967216 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:45 crc kubenswrapper[4948]: I0122 14:19:45.967234 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:45Z","lastTransitionTime":"2026-01-22T14:19:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.069139 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.069184 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.069196 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.069212 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.069224 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:46Z","lastTransitionTime":"2026-01-22T14:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.171790 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.171831 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.171841 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.171856 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.171868 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:46Z","lastTransitionTime":"2026-01-22T14:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.275861 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.275899 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.275912 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.275928 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.275941 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:46Z","lastTransitionTime":"2026-01-22T14:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.342206 4948 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5294_937749c1-3c7a-4f48-b65a-84b1335d64b6/ovnkube-controller/2.log" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.342982 4948 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5294_937749c1-3c7a-4f48-b65a-84b1335d64b6/ovnkube-controller/1.log" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.347800 4948 generic.go:334] "Generic (PLEG): container finished" podID="937749c1-3c7a-4f48-b65a-84b1335d64b6" containerID="b68a112569f8d8a256713c4ca638606ac4fa5bbce2e3e8eb554ee755cbc98aa0" exitCode=1 Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.347839 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" event={"ID":"937749c1-3c7a-4f48-b65a-84b1335d64b6","Type":"ContainerDied","Data":"b68a112569f8d8a256713c4ca638606ac4fa5bbce2e3e8eb554ee755cbc98aa0"} Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.347884 4948 scope.go:117] "RemoveContainer" containerID="159bf57702dc28f8e98dc4e7680fa2ae8a1ca81393fd615e28ea5d5b74c83b9f" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.353860 4948 scope.go:117] "RemoveContainer" containerID="b68a112569f8d8a256713c4ca638606ac4fa5bbce2e3e8eb554ee755cbc98aa0" Jan 22 14:19:46 crc kubenswrapper[4948]: E0122 14:19:46.354458 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-c5294_openshift-ovn-kubernetes(937749c1-3c7a-4f48-b65a-84b1335d64b6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" podUID="937749c1-3c7a-4f48-b65a-84b1335d64b6" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.373132 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:46Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.383359 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.383405 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.383417 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.383435 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.383447 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:46Z","lastTransitionTime":"2026-01-22T14:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.395400 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1ddd883d66df032d348dd5c01bf01eec618e6ceab82f0b74d6411033d96419c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://724993b6da16942a70c289ef23b888e5c99acdb5e757981e26ff9cfb7ca074ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://b08d2009af5c34e87963d42b418cb5bc2e4420bcae3ff157f76072a644659921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://174e918b3a7095dee796235feeb4d0ffffbfbba45bd3f647cb68a4980e921778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81d7b5a23cfc553e1a1c8a693b618499068a9bd28d8fde52df86091c290081e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a22632cd0bfb0bc2caa9fabdd5f4cbb6a7f4bb9c366fe06c7367d64584cb98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b68a112569f8d8a256713c4ca638606ac4fa5bbce2e3e8eb554ee755cbc98aa0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://159bf57702dc28f8e98dc4e7680fa2ae8a1ca81393fd615e28ea5d5b74c83b9f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:19:33Z\\\",\\\"message\\\":\\\"3.144663 6392 model_client.go:382] Update operations generated as: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-controller-manager-operator/metrics]} name:Service_openshift-controller-manager-operator/metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.5.58:443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {4607c9b7-15f9-4ba0-86e5-0021ba7e4488}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0122 14:19:33.144693 6392 obj_retry.go:303] Retry object setup: *v1.Pod openshift-machine-config-operator/machine-config-daemon-ldzzt\\\\nI0122 14:19:33.144700 6392 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0122 14:19:33.144710 6392 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-node-identity/network-node-identity-vrzqb\\\\nI0122 14:19:33.144711 6392 obj_retry.go:303] Retry object setup: *v1.Pod openshift-kube-controller-manager/kube-controller-manager-crc\\\\nI0122 14:19:33.144717 6392 ovn.go:134] Ensuring zone local for Pod openshift-network-node-identity/network-node-identity-vrzqb in 
no\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:32Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b68a112569f8d8a256713c4ca638606ac4fa5bbce2e3e8eb554ee755cbc98aa0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"message\\\":\\\" 6595 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0122 14:19:45.154182 6595 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 14:19:45.154211 6595 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 14:19:45.154218 6595 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 14:19:45.154283 6595 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 14:19:45.154312 6595 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 14:19:45.154317 6595 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 14:19:45.154331 6595 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0122 14:19:45.154391 6595 factory.go:656] Stopping watch factory\\\\nI0122 14:19:45.154409 6595 ovnkube.go:599] Stopped ovnkube\\\\nI0122 14:19:45.154452 6595 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 14:19:45.154468 6595 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 14:19:45.154478 6595 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0122 14:19:45.154488 6595 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0122 14:19:45.154515 6595 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0122 14:19:45.154519 6595 handler.go:208] Removed *v1.Node 
even\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67faca70b5ec9d02fe3cd9455cb1b95e9dce6fe764d354a3ce7b132ccfc315b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d
2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:46Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.411197 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ece4aa-3874-4f6e-a88f-efcc2e745c10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://394b37292a9478d6e14f7cbb503ad5c350bec50720d2e6cfcb9fd2bf5640bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadO
nly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5175067f2a33555ac08c94572fbffc6c7b5dd06b33356c338a77492e859e5b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wxjm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:46Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.422399 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fxccj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1280bb23-c283-4970-a78b-6de7854469ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:36Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fxccj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:46Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.432117 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:46Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.444737 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j4g7x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d0ef450-a908-4e0f-b551-de0724320540\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77e0f7c92ba0b75f07179e4619dbbf9270727b13bd14348dec038239fef65af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mzcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j4g7x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:46Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.458303 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:46Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.473770 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-22T14:19:46Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.487379 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.487428 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.487443 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.487462 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.487476 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:46Z","lastTransitionTime":"2026-01-22T14:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.491987 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad48d2cda8a3700afaf5c50dbd1b380cc15c78e9bb88b42701297b7aefd5d5b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":
\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445a
c1f7999bfb9f7e77dd822dea92db7443\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:46Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.511590 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-re
generation-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] 
\\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:46Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.528845 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:46Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.551937 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:46Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.569247 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:46Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.587735 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:46Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.590735 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.590768 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.590781 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.590805 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.590818 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:46Z","lastTransitionTime":"2026-01-22T14:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.602558 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:46Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.621049 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:46Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.693701 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.693786 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.693814 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.693849 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.693875 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:46Z","lastTransitionTime":"2026-01-22T14:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.797104 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.797454 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.797523 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.797611 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.797753 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:46Z","lastTransitionTime":"2026-01-22T14:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.900396 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.900807 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.900894 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.901004 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.901130 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:46Z","lastTransitionTime":"2026-01-22T14:19:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.929714 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 06:22:11.140352232 +0000 UTC Jan 22 14:19:46 crc kubenswrapper[4948]: I0122 14:19:46.956299 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:19:46 crc kubenswrapper[4948]: E0122 14:19:46.956446 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.005142 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.005182 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.005191 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.005207 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.005220 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:47Z","lastTransitionTime":"2026-01-22T14:19:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.108655 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.108716 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.108734 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.108761 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.108780 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:47Z","lastTransitionTime":"2026-01-22T14:19:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.211466 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.211501 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.211510 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.211524 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.211535 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:47Z","lastTransitionTime":"2026-01-22T14:19:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.314463 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.314495 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.314505 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.314519 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.314529 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:47Z","lastTransitionTime":"2026-01-22T14:19:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.352557 4948 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5294_937749c1-3c7a-4f48-b65a-84b1335d64b6/ovnkube-controller/2.log" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.358134 4948 scope.go:117] "RemoveContainer" containerID="b68a112569f8d8a256713c4ca638606ac4fa5bbce2e3e8eb554ee755cbc98aa0" Jan 22 14:19:47 crc kubenswrapper[4948]: E0122 14:19:47.358534 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-c5294_openshift-ovn-kubernetes(937749c1-3c7a-4f48-b65a-84b1335d64b6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" podUID="937749c1-3c7a-4f48-b65a-84b1335d64b6" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.374551 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":
\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:47Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.389346 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:47Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.403067 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:47Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.418544 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j4g7x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d0ef450-a908-4e0f-b551-de0724320540\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77e0f7c92ba0b75f07179e4619dbbf9270727b13bd14348dec038239fef65af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mzcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j4g7x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:47Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.418849 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.418903 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.418920 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.418947 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.418968 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:47Z","lastTransitionTime":"2026-01-22T14:19:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.434996 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:47Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.447914 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:47Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.460988 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:47Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.475558 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad48d2cda8a3700afaf5c50dbd1b380cc15c78e9bb88b42701297b7aefd5d5b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:47Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.492843 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:47Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.507153 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:47Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.521621 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.521655 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.521667 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.521682 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.521692 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:47Z","lastTransitionTime":"2026-01-22T14:19:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.525968 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:47Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.541588 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:47Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.575862 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1ddd883d66df032d348dd5c01bf01eec618e6ceab82f0b74d6411033d96419c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://724993b6da16942a70c289ef23b888e5c99acdb5e757981e26ff9cfb7ca074ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/va
r/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b08d2009af5c34e87963d42b418cb5bc2e4420bcae3ff157f76072a644659921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://174e918b3a7095dee796235feeb4d0ffffbfbba45bd3f647cb68a4980e921778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81d7b5a23cfc553e1a1c8a693b618499068a9bd28d8fde52df86091c290081e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a22632cd0bfb0bc2caa9fabdd5f4cbb6a7f4bb9c366fe06c7367d6458
4cb98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b68a112569f8d8a256713c4ca638606ac4fa5bbce2e3e8eb554ee755cbc98aa0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b68a112569f8d8a256713c4ca638606ac4fa5bbce2e3e8eb554ee755cbc98aa0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"message\\\":\\\" 6595 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0122 14:19:45.154182 6595 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 14:19:45.154211 6595 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 14:19:45.154218 6595 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 14:19:45.154283 6595 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 14:19:45.154312 6595 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 14:19:45.154317 6595 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 14:19:45.154331 6595 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0122 14:19:45.154391 6595 factory.go:656] Stopping watch factory\\\\nI0122 14:19:45.154409 6595 ovnkube.go:599] Stopped ovnkube\\\\nI0122 14:19:45.154452 6595 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 14:19:45.154468 6595 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 14:19:45.154478 6595 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0122 14:19:45.154488 6595 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0122 14:19:45.154515 6595 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0122 14:19:45.154519 6595 handler.go:208] Removed *v1.Node 
even\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-c5294_openshift-ovn-kubernetes(937749c1-3c7a-4f48-b65a-84b1335d64b6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67faca70b5ec9d02fe3cd9455cb1b95e9dce6fe764d354a3ce7b132ccfc315b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiv
eReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:47Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.593432 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ece4aa-3874-4f6e-a88f-efcc2e745c10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://394b37292a9478d6e14f7cbb503ad5c350bec50720d2e6cfcb9fd2bf5640bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5175067f2a33555ac08c94572fbffc6c7b5dd06b33356c338a77492e859e5b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wxjm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:47Z is after 2025-08-24T17:21:41Z" Jan 22 
14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.608118 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fxccj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1280bb23-c283-4970-a78b-6de7854469ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:36Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fxccj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:47Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.623765 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:47Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.623780 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.623817 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.623826 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.623838 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.623848 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:47Z","lastTransitionTime":"2026-01-22T14:19:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.725813 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.725851 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.725864 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.725880 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.725894 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:47Z","lastTransitionTime":"2026-01-22T14:19:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.829680 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.829741 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.829762 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.829789 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.829809 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:47Z","lastTransitionTime":"2026-01-22T14:19:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.930181 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 14:32:30.364071404 +0000 UTC Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.932326 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.932385 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.932407 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.932435 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.932458 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:47Z","lastTransitionTime":"2026-01-22T14:19:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.956589 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.956679 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:19:47 crc kubenswrapper[4948]: I0122 14:19:47.956617 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:19:47 crc kubenswrapper[4948]: E0122 14:19:47.956834 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:19:47 crc kubenswrapper[4948]: E0122 14:19:47.957099 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:19:47 crc kubenswrapper[4948]: E0122 14:19:47.957247 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.034822 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.034944 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.034958 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.034984 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.034998 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:48Z","lastTransitionTime":"2026-01-22T14:19:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.138542 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.138817 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.138888 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.138988 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.139089 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:48Z","lastTransitionTime":"2026-01-22T14:19:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.241739 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.241967 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.242062 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.242149 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.242219 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:48Z","lastTransitionTime":"2026-01-22T14:19:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.345005 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.345800 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.345830 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.345849 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.345862 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:48Z","lastTransitionTime":"2026-01-22T14:19:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.448361 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.448425 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.448451 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.448478 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.448501 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:48Z","lastTransitionTime":"2026-01-22T14:19:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.551454 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.551499 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.551513 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.551535 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.551552 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:48Z","lastTransitionTime":"2026-01-22T14:19:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.655392 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.655476 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.655493 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.655518 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.655535 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:48Z","lastTransitionTime":"2026-01-22T14:19:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.759177 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.759242 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.759259 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.759282 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.759300 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:48Z","lastTransitionTime":"2026-01-22T14:19:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.862382 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.862951 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.863148 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.863303 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.863447 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:48Z","lastTransitionTime":"2026-01-22T14:19:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.930879 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-07 22:18:20.055546975 +0000 UTC Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.956733 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:19:48 crc kubenswrapper[4948]: E0122 14:19:48.957189 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.967169 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.967216 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.967226 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.967241 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:48 crc kubenswrapper[4948]: I0122 14:19:48.967250 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:48Z","lastTransitionTime":"2026-01-22T14:19:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.075559 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.075597 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.075611 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.075633 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.075649 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:49Z","lastTransitionTime":"2026-01-22T14:19:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.179171 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.179237 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.179255 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.179282 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.179303 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:49Z","lastTransitionTime":"2026-01-22T14:19:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.282289 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.282375 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.282396 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.282429 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.282452 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:49Z","lastTransitionTime":"2026-01-22T14:19:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.386430 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.386503 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.386521 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.386546 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.386565 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:49Z","lastTransitionTime":"2026-01-22T14:19:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.490634 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.490700 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.490735 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.490773 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.490795 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:49Z","lastTransitionTime":"2026-01-22T14:19:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.593947 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.594058 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.594084 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.594114 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.594136 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:49Z","lastTransitionTime":"2026-01-22T14:19:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.697578 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.697632 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.697648 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.697666 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.697680 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:49Z","lastTransitionTime":"2026-01-22T14:19:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.800615 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.800683 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.800703 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.800737 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.800761 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:49Z","lastTransitionTime":"2026-01-22T14:19:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.903362 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.903418 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.903435 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.903455 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.903467 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:49Z","lastTransitionTime":"2026-01-22T14:19:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.931805 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 13:23:11.301822431 +0000 UTC Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.956607 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.956629 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:19:49 crc kubenswrapper[4948]: I0122 14:19:49.956749 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:19:49 crc kubenswrapper[4948]: E0122 14:19:49.956741 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:19:49 crc kubenswrapper[4948]: E0122 14:19:49.956885 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:19:49 crc kubenswrapper[4948]: E0122 14:19:49.956929 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.006991 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.007068 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.007080 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.007100 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.007118 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:50Z","lastTransitionTime":"2026-01-22T14:19:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.109766 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.109809 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.109822 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.109840 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.109855 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:50Z","lastTransitionTime":"2026-01-22T14:19:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.211652 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.211688 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.211697 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.211711 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.211721 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:50Z","lastTransitionTime":"2026-01-22T14:19:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.314267 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.314336 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.314359 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.314391 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.314415 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:50Z","lastTransitionTime":"2026-01-22T14:19:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.416864 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.416917 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.416934 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.416959 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.416975 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:50Z","lastTransitionTime":"2026-01-22T14:19:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.520535 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.520624 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.520656 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.520688 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.520709 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:50Z","lastTransitionTime":"2026-01-22T14:19:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.623395 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.623438 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.623449 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.623466 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.623478 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:50Z","lastTransitionTime":"2026-01-22T14:19:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.725870 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.725927 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.725944 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.725965 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.725997 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:50Z","lastTransitionTime":"2026-01-22T14:19:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.828390 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.828426 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.828435 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.828448 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.828457 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:50Z","lastTransitionTime":"2026-01-22T14:19:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.930632 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.930703 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.930719 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.930743 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.930760 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:50Z","lastTransitionTime":"2026-01-22T14:19:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.932735 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 14:17:24.508498671 +0000 UTC Jan 22 14:19:50 crc kubenswrapper[4948]: I0122 14:19:50.956172 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:19:50 crc kubenswrapper[4948]: E0122 14:19:50.956338 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.033352 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.033412 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.033428 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.033449 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.033463 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:51Z","lastTransitionTime":"2026-01-22T14:19:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.136335 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.136377 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.136385 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.136399 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.136409 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:51Z","lastTransitionTime":"2026-01-22T14:19:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.239412 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.239478 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.239495 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.239524 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.239543 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:51Z","lastTransitionTime":"2026-01-22T14:19:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.342470 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.342535 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.342560 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.342592 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.342614 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:51Z","lastTransitionTime":"2026-01-22T14:19:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.446444 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.446517 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.446539 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.446569 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.446590 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:51Z","lastTransitionTime":"2026-01-22T14:19:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.549451 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.549489 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.549500 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.549515 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.549527 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:51Z","lastTransitionTime":"2026-01-22T14:19:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.652751 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.652833 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.652842 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.652857 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.652865 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:51Z","lastTransitionTime":"2026-01-22T14:19:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.755709 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.755789 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.755816 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.755844 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.755865 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:51Z","lastTransitionTime":"2026-01-22T14:19:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.859788 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.859861 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.859884 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.859914 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.859935 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:51Z","lastTransitionTime":"2026-01-22T14:19:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.933587 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 11:59:37.078795032 +0000 UTC Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.957113 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.957173 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:19:51 crc kubenswrapper[4948]: E0122 14:19:51.957240 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:19:51 crc kubenswrapper[4948]: E0122 14:19:51.957305 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.957568 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:51 crc kubenswrapper[4948]: E0122 14:19:51.957877 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.962080 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.962113 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.962123 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.962137 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.962148 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:51Z","lastTransitionTime":"2026-01-22T14:19:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.978118 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:51Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:51 crc kubenswrapper[4948]: I0122 14:19:51.994527 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad48d2cda8a3700afaf5c50dbd1b380cc15c78e9bb88b42701297b7aefd5d5b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:51Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.015506 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:52Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.031856 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:52Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.049147 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:52Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.061612 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:52Z is after 
2025-08-24T17:21:41Z" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.064321 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.064368 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.064381 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.064420 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.064435 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:52Z","lastTransitionTime":"2026-01-22T14:19:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.078816 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:52Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.091194 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:52Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.105357 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fxccj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1280bb23-c283-4970-a78b-6de7854469ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:36Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fxccj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:52Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.124852 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:52Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.154738 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1ddd883d66df032d348dd5c01bf01eec618e6ceab82f0b74d6411033d96419c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://724993b6da16942a70c289ef23b888e5c99acdb5e757981e26ff9cfb7ca074ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b08d2009af5c34e87963d42b418cb5bc2e4420bcae3ff157f76072a644659921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://174e918b3a7095dee796235feeb4d0ffffbfbba45bd3f647cb68a4980e921778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81d7b5a23cfc553e1a1c8a693b618499068a9bd28d8fde52df86091c290081e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a22632cd0bfb0bc2caa9fabdd5f4cbb6a7f4bb9c366fe06c7367d64584cb98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b68a112569f8d8a256713c4ca638606ac4fa5bbc
e2e3e8eb554ee755cbc98aa0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b68a112569f8d8a256713c4ca638606ac4fa5bbce2e3e8eb554ee755cbc98aa0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"message\\\":\\\" 6595 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0122 14:19:45.154182 6595 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 14:19:45.154211 6595 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 14:19:45.154218 6595 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 14:19:45.154283 6595 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 14:19:45.154312 6595 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 14:19:45.154317 6595 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 14:19:45.154331 6595 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0122 14:19:45.154391 6595 factory.go:656] Stopping watch factory\\\\nI0122 14:19:45.154409 6595 ovnkube.go:599] Stopped ovnkube\\\\nI0122 14:19:45.154452 6595 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 14:19:45.154468 6595 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 14:19:45.154478 6595 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0122 14:19:45.154488 6595 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0122 14:19:45.154515 6595 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0122 14:19:45.154519 6595 handler.go:208] Removed *v1.Node even\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-c5294_openshift-ovn-kubernetes(937749c1-3c7a-4f48-b65a-84b1335d64b6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67faca70b5ec9d02fe3cd9455cb1b95e9dce6fe764d354a3ce7b132ccfc315b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:52Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.167819 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.167899 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.167929 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.167965 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.167994 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:52Z","lastTransitionTime":"2026-01-22T14:19:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.174390 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ece4aa-3874-4f6e-a88f-efcc2e745c10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://394b37292a9478d6e14f7cbb503ad5c350bec50720d2e6cfcb9fd2bf5640bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5175067f2a33555ac08c94572fbffc6c7b5dd06b33356c338a77492e859e5b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wxjm7\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:52Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.188564 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:52Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.203221 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:52Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.217473 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j4g7x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d0ef450-a908-4e0f-b551-de0724320540\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77e0f7c92ba0b75f07179e4619dbbf9270727b13bd14348dec038239fef65af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mzcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j4g7x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:52Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.238789 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:52Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.271382 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.271466 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.271487 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.271516 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.271536 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:52Z","lastTransitionTime":"2026-01-22T14:19:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.374644 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.374734 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.374749 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.374771 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.374788 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:52Z","lastTransitionTime":"2026-01-22T14:19:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.477934 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.478002 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.478048 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.478074 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.478095 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:52Z","lastTransitionTime":"2026-01-22T14:19:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.580965 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.581115 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.581141 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.581170 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.581193 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:52Z","lastTransitionTime":"2026-01-22T14:19:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.685606 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.685678 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.685700 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.685728 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.685745 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:52Z","lastTransitionTime":"2026-01-22T14:19:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.781306 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1280bb23-c283-4970-a78b-6de7854469ac-metrics-certs\") pod \"network-metrics-daemon-fxccj\" (UID: \"1280bb23-c283-4970-a78b-6de7854469ac\") " pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:19:52 crc kubenswrapper[4948]: E0122 14:19:52.781471 4948 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:19:52 crc kubenswrapper[4948]: E0122 14:19:52.782124 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1280bb23-c283-4970-a78b-6de7854469ac-metrics-certs podName:1280bb23-c283-4970-a78b-6de7854469ac nodeName:}" failed. No retries permitted until 2026-01-22 14:20:08.782086817 +0000 UTC m=+67.111759776 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1280bb23-c283-4970-a78b-6de7854469ac-metrics-certs") pod "network-metrics-daemon-fxccj" (UID: "1280bb23-c283-4970-a78b-6de7854469ac") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.790812 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.790877 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.790891 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.790916 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.790933 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:52Z","lastTransitionTime":"2026-01-22T14:19:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.894388 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.894440 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.894454 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.894473 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.894488 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:52Z","lastTransitionTime":"2026-01-22T14:19:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.934868 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-08 02:03:13.867800112 +0000 UTC Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.956475 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:19:52 crc kubenswrapper[4948]: E0122 14:19:52.956659 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.997650 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.997706 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.997718 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.997735 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:52 crc kubenswrapper[4948]: I0122 14:19:52.997747 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:52Z","lastTransitionTime":"2026-01-22T14:19:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.100284 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.100337 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.100351 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.100370 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.100382 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:53Z","lastTransitionTime":"2026-01-22T14:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.203803 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.203888 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.203911 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.203980 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.204002 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:53Z","lastTransitionTime":"2026-01-22T14:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.307888 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.307982 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.308000 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.308076 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.308109 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:53Z","lastTransitionTime":"2026-01-22T14:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.410248 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.410303 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.410312 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.410326 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.410335 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:53Z","lastTransitionTime":"2026-01-22T14:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.512708 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.512766 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.512777 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.512797 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.512809 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:53Z","lastTransitionTime":"2026-01-22T14:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.615431 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.615457 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.615466 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.615477 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.615485 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:53Z","lastTransitionTime":"2026-01-22T14:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.717693 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.717777 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.717805 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.717857 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.717885 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:53Z","lastTransitionTime":"2026-01-22T14:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.790408 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:19:53 crc kubenswrapper[4948]: E0122 14:19:53.790750 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:20:25.790718267 +0000 UTC m=+84.120391236 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.820569 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.820628 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.820646 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.820671 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.820693 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:53Z","lastTransitionTime":"2026-01-22T14:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.897088 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.897476 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.897690 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.897937 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:53 crc kubenswrapper[4948]: E0122 14:19:53.897269 4948 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:19:53 crc kubenswrapper[4948]: E0122 14:19:53.898595 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 14:20:25.898555305 +0000 UTC m=+84.228228274 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:19:53 crc kubenswrapper[4948]: E0122 14:19:53.898635 4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:19:53 crc kubenswrapper[4948]: E0122 14:19:53.898886 4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 14:19:53 crc kubenswrapper[4948]: E0122 14:19:53.898718 4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:19:53 crc kubenswrapper[4948]: E0122 14:19:53.899396 4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 14:19:53 crc kubenswrapper[4948]: E0122 14:19:53.899416 4948 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:19:53 crc kubenswrapper[4948]: E0122 14:19:53.898373 4948 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:19:53 crc kubenswrapper[4948]: E0122 14:19:53.899487 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 14:20:25.899466931 +0000 UTC m=+84.229139870 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:19:53 crc kubenswrapper[4948]: E0122 14:19:53.899574 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 14:20:25.899541533 +0000 UTC m=+84.229214502 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:19:53 crc kubenswrapper[4948]: E0122 14:19:53.900216 4948 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:19:53 crc kubenswrapper[4948]: E0122 14:19:53.900326 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 14:20:25.900308595 +0000 UTC m=+84.229981544 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.923762 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.923821 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.923837 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.923859 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.923873 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:53Z","lastTransitionTime":"2026-01-22T14:19:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.936000 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 07:25:31.166984622 +0000 UTC Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.956545 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.956567 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:19:53 crc kubenswrapper[4948]: E0122 14:19:53.956678 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:19:53 crc kubenswrapper[4948]: E0122 14:19:53.956975 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:19:53 crc kubenswrapper[4948]: I0122 14:19:53.956810 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:19:53 crc kubenswrapper[4948]: E0122 14:19:53.957637 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.027091 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.027489 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.027687 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.027850 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.027991 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:54Z","lastTransitionTime":"2026-01-22T14:19:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.133126 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.133173 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.133188 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.133206 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.133218 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:54Z","lastTransitionTime":"2026-01-22T14:19:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.235411 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.235489 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.235511 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.235536 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.235555 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:54Z","lastTransitionTime":"2026-01-22T14:19:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.338437 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.338658 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.338763 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.338857 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.338942 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:54Z","lastTransitionTime":"2026-01-22T14:19:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.441276 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.441333 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.441352 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.441405 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.441425 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:54Z","lastTransitionTime":"2026-01-22T14:19:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.543766 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.544241 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.544315 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.544374 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.544439 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:54Z","lastTransitionTime":"2026-01-22T14:19:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.646885 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.647468 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.647556 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.647633 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.647715 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:54Z","lastTransitionTime":"2026-01-22T14:19:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.750886 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.750950 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.750969 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.750998 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.751038 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:54Z","lastTransitionTime":"2026-01-22T14:19:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.853070 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.853110 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.853122 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.853138 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.853148 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:54Z","lastTransitionTime":"2026-01-22T14:19:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.937143 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 07:56:53.168500425 +0000 UTC
Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.955862 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.955903 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.955914 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.955929 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.955940 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:54Z","lastTransitionTime":"2026-01-22T14:19:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 14:19:54 crc kubenswrapper[4948]: I0122 14:19:54.956138 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj"
Jan 22 14:19:54 crc kubenswrapper[4948]: E0122 14:19:54.956309 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac"
Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.059095 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.059161 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.059178 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.059202 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.059221 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:55Z","lastTransitionTime":"2026-01-22T14:19:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.162216 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.162289 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.162314 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.162344 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.162366 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:55Z","lastTransitionTime":"2026-01-22T14:19:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.265948 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.266041 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.266060 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.266086 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.266105 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:55Z","lastTransitionTime":"2026-01-22T14:19:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.322612 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.338196 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.351453 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": 
tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.369668 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.369758 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.369802 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.369835 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.369860 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:55Z","lastTransitionTime":"2026-01-22T14:19:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.376799 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.392453 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.410923 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-c
ni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.428760 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.454741 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1ddd883d66df032d348dd5c01bf01eec618e6ceab82f0b74d6411033d96419c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://724993b6da16942a70c289ef23b888e5c99acdb5e757981e26ff9cfb7ca074ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b08d2009af5c34e87963d42b418cb5bc2e4420bcae3ff157f76072a644659921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://174e918b3a7095dee796235feeb4d0ffffbfbba45bd3f647cb68a4980e921778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81d7b5a23cfc553e1a1c8a693b618499068a9bd28d8fde52df86091c290081e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a22
632cd0bfb0bc2caa9fabdd5f4cbb6a7f4bb9c366fe06c7367d64584cb98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b68a112569f8d8a256713c4ca638606ac4fa5bbce2e3e8eb554ee755cbc98aa0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b68a112569f8d8a256713c4ca638606ac4fa5bbce2e3e8eb554ee755cbc98aa0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"message\\\":\\\" 6595 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0122 14:19:45.154182 6595 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 14:19:45.154211 6595 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 14:19:45.154218 6595 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 14:19:45.154283 6595 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 14:19:45.154312 6595 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 14:19:45.154317 6595 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 14:19:45.154331 6595 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0122 14:19:45.154391 6595 factory.go:656] Stopping watch factory\\\\nI0122 14:19:45.154409 6595 ovnkube.go:599] Stopped ovnkube\\\\nI0122 14:19:45.154452 6595 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 14:19:45.154468 6595 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 14:19:45.154478 6595 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0122 14:19:45.154488 6595 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0122 14:19:45.154515 6595 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0122 14:19:45.154519 6595 handler.go:208] Removed *v1.Node 
even\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-c5294_openshift-ovn-kubernetes(937749c1-3c7a-4f48-b65a-84b1335d64b6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67faca70b5ec9d02fe3cd9455cb1b95e9dce6fe764d354a3ce7b132ccfc315b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiv
eReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.468919 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ece4aa-3874-4f6e-a88f-efcc2e745c10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://394b37292a9478d6e14f7cbb503ad5c350bec50720d2e6cfcb9fd2bf5640bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5175067f2a33555ac08c94572fbffc6c7b5dd06b33356c338a77492e859e5b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wxjm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:55Z is after 2025-08-24T17:21:41Z" Jan 22 
14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.472967 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.473048 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.473066 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.473082 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.473418 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:55Z","lastTransitionTime":"2026-01-22T14:19:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.484489 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fxccj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1280bb23-c283-4970-a78b-6de7854469ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:36Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fxccj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.497949 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.511274 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate 
has expired or is not yet valid: current time 2026-01-22T14:19:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.526406 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.537054 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j4g7x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d0ef450-a908-4e0f-b551-de0724320540\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77e0f7c92ba0b75f07179e4619dbbf9270727b13bd14348dec038239fef65af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mzcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j4g7x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.550222 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.560962 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.572746 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.576568 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.576630 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.576640 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.576654 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.576663 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:55Z","lastTransitionTime":"2026-01-22T14:19:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.588545 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad48d2cda8a3700afaf5c50dbd1b380cc15c78e9bb88b42701297b7aefd5d5b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.679589 4948 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.680182 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.680202 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.680219 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.680231 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:55Z","lastTransitionTime":"2026-01-22T14:19:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.783492 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.783570 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.783588 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.783613 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.783631 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:55Z","lastTransitionTime":"2026-01-22T14:19:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.804333 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.804637 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.804833 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.805116 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.805270 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:55Z","lastTransitionTime":"2026-01-22T14:19:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:55 crc kubenswrapper[4948]: E0122 14:19:55.822526 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.828472 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.828924 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.829247 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.829450 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.829655 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:55Z","lastTransitionTime":"2026-01-22T14:19:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:55 crc kubenswrapper[4948]: E0122 14:19:55.850338 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.855085 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.855331 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.855555 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.855814 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.856003 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:55Z","lastTransitionTime":"2026-01-22T14:19:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:55 crc kubenswrapper[4948]: E0122 14:19:55.879214 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.883392 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.883456 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.883474 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.883497 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.883514 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:55Z","lastTransitionTime":"2026-01-22T14:19:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:55 crc kubenswrapper[4948]: E0122 14:19:55.898965 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.902756 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.902810 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.902822 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.902840 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.902856 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:55Z","lastTransitionTime":"2026-01-22T14:19:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:55 crc kubenswrapper[4948]: E0122 14:19:55.919935 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:19:55Z is after 2025-08-24T17:21:41Z" Jan 22 14:19:55 crc kubenswrapper[4948]: E0122 14:19:55.920113 4948 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.921671 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.921869 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.921956 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.922088 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.922190 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:55Z","lastTransitionTime":"2026-01-22T14:19:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.938206 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 21:30:05.815667192 +0000 UTC Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.956962 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.957277 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:19:55 crc kubenswrapper[4948]: E0122 14:19:55.957466 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:19:55 crc kubenswrapper[4948]: I0122 14:19:55.957369 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:19:55 crc kubenswrapper[4948]: E0122 14:19:55.957753 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:19:55 crc kubenswrapper[4948]: E0122 14:19:55.957279 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.024571 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.024605 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.024616 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.024633 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.024645 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:56Z","lastTransitionTime":"2026-01-22T14:19:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.127080 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.127117 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.127129 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.127144 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.127155 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:56Z","lastTransitionTime":"2026-01-22T14:19:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.232040 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.232094 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.232112 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.232135 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.232153 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:56Z","lastTransitionTime":"2026-01-22T14:19:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.334713 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.335288 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.335421 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.335594 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.335728 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:56Z","lastTransitionTime":"2026-01-22T14:19:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.438927 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.438989 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.439007 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.439066 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.439086 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:56Z","lastTransitionTime":"2026-01-22T14:19:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.542736 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.542794 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.542811 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.542833 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.542850 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:56Z","lastTransitionTime":"2026-01-22T14:19:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.645984 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.646696 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.646898 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.647164 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.647389 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:56Z","lastTransitionTime":"2026-01-22T14:19:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.750469 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.751134 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.751166 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.751198 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.751217 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:56Z","lastTransitionTime":"2026-01-22T14:19:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.853880 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.853953 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.853976 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.854004 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.854093 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:56Z","lastTransitionTime":"2026-01-22T14:19:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.939101 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 09:33:19.945523492 +0000 UTC Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.956232 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.956609 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.956655 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.956673 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.956711 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:56 crc kubenswrapper[4948]: E0122 14:19:56.956702 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:19:56 crc kubenswrapper[4948]: I0122 14:19:56.956729 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:56Z","lastTransitionTime":"2026-01-22T14:19:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.059009 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.059104 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.059129 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.059158 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.059182 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:57Z","lastTransitionTime":"2026-01-22T14:19:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.162353 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.162411 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.162427 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.162450 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.162466 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:57Z","lastTransitionTime":"2026-01-22T14:19:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.265347 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.265397 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.265412 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.265433 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.265451 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:57Z","lastTransitionTime":"2026-01-22T14:19:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.368986 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.369095 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.369111 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.369134 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.369148 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:57Z","lastTransitionTime":"2026-01-22T14:19:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.472786 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.472877 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.472907 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.472940 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.472972 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:57Z","lastTransitionTime":"2026-01-22T14:19:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.575890 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.575946 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.575964 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.575988 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.576005 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:57Z","lastTransitionTime":"2026-01-22T14:19:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.679881 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.679929 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.679941 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.679961 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.679976 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:57Z","lastTransitionTime":"2026-01-22T14:19:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.782971 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.783067 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.783091 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.783122 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.783144 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:57Z","lastTransitionTime":"2026-01-22T14:19:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.885831 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.885885 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.885902 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.885922 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.885937 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:57Z","lastTransitionTime":"2026-01-22T14:19:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.940311 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 22:09:57.39480087 +0000 UTC Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.956945 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.956999 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:19:57 crc kubenswrapper[4948]: E0122 14:19:57.957163 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.957206 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:57 crc kubenswrapper[4948]: E0122 14:19:57.957323 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:19:57 crc kubenswrapper[4948]: E0122 14:19:57.957441 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.988457 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.988512 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.988526 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.988546 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:57 crc kubenswrapper[4948]: I0122 14:19:57.988558 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:57Z","lastTransitionTime":"2026-01-22T14:19:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.091527 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.091562 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.091589 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.091602 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.091611 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:58Z","lastTransitionTime":"2026-01-22T14:19:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.194200 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.194259 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.194271 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.194288 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.194301 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:58Z","lastTransitionTime":"2026-01-22T14:19:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.297112 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.297220 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.297243 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.297276 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.297312 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:58Z","lastTransitionTime":"2026-01-22T14:19:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.399198 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.399237 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.399273 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.399292 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.399303 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:58Z","lastTransitionTime":"2026-01-22T14:19:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.501724 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.501768 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.501777 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.501791 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.501800 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:58Z","lastTransitionTime":"2026-01-22T14:19:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.604231 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.604293 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.604306 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.604321 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.604333 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:58Z","lastTransitionTime":"2026-01-22T14:19:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.706670 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.706713 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.706727 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.706746 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.706760 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:58Z","lastTransitionTime":"2026-01-22T14:19:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.810252 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.810311 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.810320 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.810334 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.810344 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:58Z","lastTransitionTime":"2026-01-22T14:19:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.913633 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.913682 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.913696 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.913715 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.913728 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:58Z","lastTransitionTime":"2026-01-22T14:19:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.941311 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 15:18:57.1659144 +0000 UTC Jan 22 14:19:58 crc kubenswrapper[4948]: I0122 14:19:58.956998 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:19:58 crc kubenswrapper[4948]: E0122 14:19:58.957153 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.016520 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.016596 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.016620 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.016649 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.016665 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:59Z","lastTransitionTime":"2026-01-22T14:19:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.119073 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.119109 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.119118 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.119131 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.119140 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:59Z","lastTransitionTime":"2026-01-22T14:19:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.222353 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.222411 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.222428 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.222451 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.222468 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:59Z","lastTransitionTime":"2026-01-22T14:19:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.324926 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.324970 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.324980 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.324995 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.325004 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:59Z","lastTransitionTime":"2026-01-22T14:19:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.428549 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.428590 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.428599 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.428614 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.428627 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:59Z","lastTransitionTime":"2026-01-22T14:19:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.531973 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.532053 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.532070 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.532100 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.532118 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:59Z","lastTransitionTime":"2026-01-22T14:19:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.635412 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.635529 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.635554 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.635586 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.635607 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:59Z","lastTransitionTime":"2026-01-22T14:19:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.737806 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.737853 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.737866 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.737883 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.737894 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:59Z","lastTransitionTime":"2026-01-22T14:19:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.840451 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.840507 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.840526 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.840549 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.840568 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:59Z","lastTransitionTime":"2026-01-22T14:19:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.941425 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 23:02:12.111755947 +0000 UTC Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.943338 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.943397 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.943420 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.943448 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.943494 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:19:59Z","lastTransitionTime":"2026-01-22T14:19:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.956467 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:19:59 crc kubenswrapper[4948]: E0122 14:19:59.956618 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.956862 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:19:59 crc kubenswrapper[4948]: E0122 14:19:59.956962 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:19:59 crc kubenswrapper[4948]: I0122 14:19:59.957346 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:19:59 crc kubenswrapper[4948]: E0122 14:19:59.957468 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.046774 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.046843 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.046869 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.046901 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.046949 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:00Z","lastTransitionTime":"2026-01-22T14:20:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.149806 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.149854 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.149873 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.149897 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.149914 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:00Z","lastTransitionTime":"2026-01-22T14:20:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.252598 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.252642 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.252653 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.252670 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.252681 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:00Z","lastTransitionTime":"2026-01-22T14:20:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.355661 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.355733 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.355757 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.355790 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.355813 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:00Z","lastTransitionTime":"2026-01-22T14:20:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.458959 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.459040 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.459056 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.459079 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.459094 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:00Z","lastTransitionTime":"2026-01-22T14:20:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.562094 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.562131 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.562140 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.562154 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.562164 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:00Z","lastTransitionTime":"2026-01-22T14:20:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.665604 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.665653 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.665666 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.665687 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.665702 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:00Z","lastTransitionTime":"2026-01-22T14:20:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.768628 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.768720 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.768743 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.768775 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.768801 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:00Z","lastTransitionTime":"2026-01-22T14:20:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.871560 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.871710 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.871741 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.871768 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.871804 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:00Z","lastTransitionTime":"2026-01-22T14:20:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.942478 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 01:11:31.51581066 +0000 UTC Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.956428 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:00 crc kubenswrapper[4948]: E0122 14:20:00.956580 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.957800 4948 scope.go:117] "RemoveContainer" containerID="b68a112569f8d8a256713c4ca638606ac4fa5bbce2e3e8eb554ee755cbc98aa0" Jan 22 14:20:00 crc kubenswrapper[4948]: E0122 14:20:00.958167 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-c5294_openshift-ovn-kubernetes(937749c1-3c7a-4f48-b65a-84b1335d64b6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" podUID="937749c1-3c7a-4f48-b65a-84b1335d64b6" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.973948 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.974035 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.974055 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.974073 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:00 crc kubenswrapper[4948]: I0122 14:20:00.974089 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:00Z","lastTransitionTime":"2026-01-22T14:20:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.077411 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.077478 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.077515 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.077546 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.077569 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:01Z","lastTransitionTime":"2026-01-22T14:20:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.180632 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.180723 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.180742 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.180777 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.180801 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:01Z","lastTransitionTime":"2026-01-22T14:20:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.284619 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.284687 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.284704 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.284727 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.284744 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:01Z","lastTransitionTime":"2026-01-22T14:20:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.388673 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.388738 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.388749 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.388772 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.388785 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:01Z","lastTransitionTime":"2026-01-22T14:20:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.492512 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.492680 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.492711 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.492788 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.492814 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:01Z","lastTransitionTime":"2026-01-22T14:20:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.596483 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.596549 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.596565 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.596581 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.596594 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:01Z","lastTransitionTime":"2026-01-22T14:20:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.699809 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.699889 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.699909 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.699938 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.699958 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:01Z","lastTransitionTime":"2026-01-22T14:20:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.803306 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.803339 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.803347 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.803359 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.803369 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:01Z","lastTransitionTime":"2026-01-22T14:20:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.905805 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.905880 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.905908 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.905939 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.905965 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:01Z","lastTransitionTime":"2026-01-22T14:20:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.943215 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-08 04:45:33.123630266 +0000 UTC Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.956856 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.956904 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:20:01 crc kubenswrapper[4948]: E0122 14:20:01.957095 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.957162 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:20:01 crc kubenswrapper[4948]: E0122 14:20:01.957379 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:20:01 crc kubenswrapper[4948]: E0122 14:20:01.957500 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.976799 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ece4aa-3874-4f6e-a88f-efcc2e745c10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://394b37292a9478d6e14f7cbb503ad5c350bec50720d2e6cfcb9fd2bf5640bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5175067f2a33555ac08c94572fbffc6c7b5dd06b33356c338a77492e859e5b79\\\",\\\"image\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wxjm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:01Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:01 crc kubenswrapper[4948]: I0122 14:20:01.993952 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fxccj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1280bb23-c283-4970-a78b-6de7854469ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:36Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fxccj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:01Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.009633 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.009702 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.009718 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.009743 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.009759 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:02Z","lastTransitionTime":"2026-01-22T14:20:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.015152 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:02Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.045939 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1ddd883d66df032d348dd5c01bf01eec618e6ceab82f0b74d6411033d96419c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://724993b6da16942a70c289ef23b888e5c99acdb5e757981e26ff9cfb7ca074ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b08d2009af5c34e87963d42b418cb5bc2e4420bcae3ff157f76072a644659921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://174e918b3a7095dee796235feeb4d0ffffbfbba45bd3f647cb68a4980e921778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81d7b5a23cfc553e1a1c8a693b618499068a9bd28d8fde52df86091c290081e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a22632cd0bfb0bc2caa9fabdd5f4cbb6a7f4bb9c366fe06c7367d64584cb98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b68a112569f8d8a256713c4ca638606ac4fa5bbce2e3e8eb554ee755cbc98aa0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b68a112569f8d8a256713c4ca638606ac4fa5bbce2e3e8eb554ee755cbc98aa0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"message\\\":\\\" 6595 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0122 14:19:45.154182 6595 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 14:19:45.154211 6595 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 14:19:45.154218 6595 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 14:19:45.154283 6595 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 14:19:45.154312 6595 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 14:19:45.154317 6595 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 14:19:45.154331 6595 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0122 14:19:45.154391 6595 factory.go:656] Stopping watch factory\\\\nI0122 14:19:45.154409 6595 ovnkube.go:599] Stopped ovnkube\\\\nI0122 14:19:45.154452 6595 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 14:19:45.154468 6595 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 14:19:45.154478 6595 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0122 14:19:45.154488 6595 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0122 14:19:45.154515 6595 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0122 14:19:45.154519 6595 handler.go:208] Removed *v1.Node even\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-c5294_openshift-ovn-kubernetes(937749c1-3c7a-4f48-b65a-84b1335d64b6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67faca70b5ec9d02fe3cd9455cb1b95e9dce6fe764d354a3ce7b132ccfc315b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:02Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.063504 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82010fba-1711-4f4d-923f-f95d74fe5925\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39b1ca48b550f3c5c60b38f85e9238163219c4e67d9d64c61b667bcc6bee07a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8ab7b54d1a954fa832fdbf15ff09e53f86b34862b2f94f402a1a535a4a45dfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68e1e2293dd2e7926da9b30214fa8ec236684f533ecfa28da18e7e4853f7cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c172fc8d47e9ef3c270382104a7d16f1befec0f84d60dc31d862c5de8a417f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c172fc8d47e9ef3c270382104a7d16f1befec0f84d60dc31d862c5de8a417f9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:02Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.080939 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:02Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.092807 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:02Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.109355 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j4g7x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d0ef450-a908-4e0f-b551-de0724320540\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77e0f7c92ba0b75f07179e4619dbbf9270727b13bd14348dec038239fef65af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mzcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j4g7x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:02Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.112088 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.112164 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.112188 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.112221 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.112245 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:02Z","lastTransitionTime":"2026-01-22T14:20:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.123989 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03
Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:02Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.136768 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:02Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.148130 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:02Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.160928 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad48d2cda8a3700afaf5c50dbd1b380cc15c78e9bb88b42701297b7aefd5d5b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:02Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.173154 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"
/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:02Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.188308 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:02Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.207323 4948 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:02Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.214369 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.214409 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.214422 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.214438 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.214452 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:02Z","lastTransitionTime":"2026-01-22T14:20:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.221930 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:02Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.238070 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:02Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.317232 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.317264 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.317275 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.317290 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.317298 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:02Z","lastTransitionTime":"2026-01-22T14:20:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.419483 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.419525 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.419535 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.419550 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.419558 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:02Z","lastTransitionTime":"2026-01-22T14:20:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.522329 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.522621 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.522742 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.522895 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.523146 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:02Z","lastTransitionTime":"2026-01-22T14:20:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.626196 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.626510 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.626551 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.626575 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.626592 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:02Z","lastTransitionTime":"2026-01-22T14:20:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.729578 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.729616 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.729627 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.729645 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.729657 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:02Z","lastTransitionTime":"2026-01-22T14:20:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.832292 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.832318 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.832326 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.832338 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.832346 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:02Z","lastTransitionTime":"2026-01-22T14:20:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.935192 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.935253 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.935277 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.935307 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.935328 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:02Z","lastTransitionTime":"2026-01-22T14:20:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.943388 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-31 09:29:36.179173304 +0000 UTC Jan 22 14:20:02 crc kubenswrapper[4948]: I0122 14:20:02.956282 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:02 crc kubenswrapper[4948]: E0122 14:20:02.956472 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.037718 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.037759 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.037777 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.037794 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.037803 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:03Z","lastTransitionTime":"2026-01-22T14:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.140111 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.140170 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.140189 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.140210 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.140227 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:03Z","lastTransitionTime":"2026-01-22T14:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.242576 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.242618 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.242630 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.242649 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.242660 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:03Z","lastTransitionTime":"2026-01-22T14:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.345223 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.345304 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.345328 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.345354 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.345377 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:03Z","lastTransitionTime":"2026-01-22T14:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.447820 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.447891 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.447911 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.447935 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.447957 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:03Z","lastTransitionTime":"2026-01-22T14:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.550168 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.550316 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.550332 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.550348 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.550360 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:03Z","lastTransitionTime":"2026-01-22T14:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.653151 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.653202 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.653217 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.653236 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.653251 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:03Z","lastTransitionTime":"2026-01-22T14:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.755947 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.755977 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.755986 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.755998 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.756006 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:03Z","lastTransitionTime":"2026-01-22T14:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.859744 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.859801 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.859819 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.859840 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.859855 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:03Z","lastTransitionTime":"2026-01-22T14:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.944072 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 16:51:39.655226806 +0000 UTC Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.956664 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.956726 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:20:03 crc kubenswrapper[4948]: E0122 14:20:03.956817 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:20:03 crc kubenswrapper[4948]: E0122 14:20:03.956919 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.956963 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:20:03 crc kubenswrapper[4948]: E0122 14:20:03.957132 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.961420 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.961446 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.961454 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.961466 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:03 crc kubenswrapper[4948]: I0122 14:20:03.961476 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:03Z","lastTransitionTime":"2026-01-22T14:20:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.064302 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.064342 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.064353 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.064368 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.064379 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:04Z","lastTransitionTime":"2026-01-22T14:20:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.166729 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.166781 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.166797 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.166820 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.166837 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:04Z","lastTransitionTime":"2026-01-22T14:20:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.268722 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.268772 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.268784 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.268805 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.268817 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:04Z","lastTransitionTime":"2026-01-22T14:20:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.371401 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.371457 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.371467 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.371483 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.371492 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:04Z","lastTransitionTime":"2026-01-22T14:20:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.474107 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.474144 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.474153 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.474167 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.474177 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:04Z","lastTransitionTime":"2026-01-22T14:20:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.576532 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.576576 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.576587 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.576603 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.576614 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:04Z","lastTransitionTime":"2026-01-22T14:20:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.680518 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.680558 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.680567 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.680583 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.680591 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:04Z","lastTransitionTime":"2026-01-22T14:20:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.783197 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.783238 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.783251 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.783266 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.783278 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:04Z","lastTransitionTime":"2026-01-22T14:20:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.885937 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.885971 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.885980 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.885994 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.886005 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:04Z","lastTransitionTime":"2026-01-22T14:20:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.945073 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 03:47:38.805230193 +0000 UTC Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.956484 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:04 crc kubenswrapper[4948]: E0122 14:20:04.956595 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.988100 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.988151 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.988171 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.988205 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:04 crc kubenswrapper[4948]: I0122 14:20:04.988224 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:04Z","lastTransitionTime":"2026-01-22T14:20:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.090273 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.090312 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.090322 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.090336 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.090346 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:05Z","lastTransitionTime":"2026-01-22T14:20:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.193467 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.193526 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.193537 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.193555 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.193567 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:05Z","lastTransitionTime":"2026-01-22T14:20:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.296268 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.296301 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.296309 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.296321 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.296330 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:05Z","lastTransitionTime":"2026-01-22T14:20:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.398198 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.398420 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.398514 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.398613 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.398677 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:05Z","lastTransitionTime":"2026-01-22T14:20:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.501243 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.501396 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.501452 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.501508 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.501572 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:05Z","lastTransitionTime":"2026-01-22T14:20:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.604115 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.604624 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.604757 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.604891 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.605039 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:05Z","lastTransitionTime":"2026-01-22T14:20:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.707600 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.707828 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.707893 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.707966 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.708060 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:05Z","lastTransitionTime":"2026-01-22T14:20:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.809925 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.810229 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.810298 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.810372 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.810440 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:05Z","lastTransitionTime":"2026-01-22T14:20:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.912089 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.912121 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.912130 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.912147 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.912156 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:05Z","lastTransitionTime":"2026-01-22T14:20:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.945871 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-13 16:30:05.306282065 +0000 UTC Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.956340 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.956373 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:20:05 crc kubenswrapper[4948]: E0122 14:20:05.956646 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:20:05 crc kubenswrapper[4948]: E0122 14:20:05.956532 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:20:05 crc kubenswrapper[4948]: I0122 14:20:05.956396 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:20:05 crc kubenswrapper[4948]: E0122 14:20:05.956726 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.014794 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.014870 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.014887 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.014912 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.014927 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:06Z","lastTransitionTime":"2026-01-22T14:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.016090 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.016193 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.016253 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.016326 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.016389 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:06Z","lastTransitionTime":"2026-01-22T14:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:06 crc kubenswrapper[4948]: E0122 14:20:06.028966 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:06Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.032247 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.032406 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.032558 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.032659 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.032742 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:06Z","lastTransitionTime":"2026-01-22T14:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:06 crc kubenswrapper[4948]: E0122 14:20:06.044485 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:06Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.048037 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.048181 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.048304 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.048402 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.048585 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:06Z","lastTransitionTime":"2026-01-22T14:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:06 crc kubenswrapper[4948]: E0122 14:20:06.062621 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:06Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.066562 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.066590 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.066598 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.066608 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.066616 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:06Z","lastTransitionTime":"2026-01-22T14:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:06 crc kubenswrapper[4948]: E0122 14:20:06.078958 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:06Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.082025 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.082064 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.082076 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.082092 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.082104 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:06Z","lastTransitionTime":"2026-01-22T14:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:06 crc kubenswrapper[4948]: E0122 14:20:06.094469 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:06Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:06Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:06 crc kubenswrapper[4948]: E0122 14:20:06.094616 4948 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.117123 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.117155 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.117163 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.117177 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.117187 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:06Z","lastTransitionTime":"2026-01-22T14:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.219244 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.219285 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.219297 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.219313 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.219323 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:06Z","lastTransitionTime":"2026-01-22T14:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.321449 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.321484 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.321494 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.321508 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.321517 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:06Z","lastTransitionTime":"2026-01-22T14:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.422867 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.423153 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.423180 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.423200 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.423215 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:06Z","lastTransitionTime":"2026-01-22T14:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.525959 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.525999 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.526023 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.526037 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.526046 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:06Z","lastTransitionTime":"2026-01-22T14:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.628674 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.628716 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.628724 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.628740 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.628750 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:06Z","lastTransitionTime":"2026-01-22T14:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.730521 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.730569 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.730580 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.730596 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.730605 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:06Z","lastTransitionTime":"2026-01-22T14:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.832544 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.832577 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.832586 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.832598 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.832607 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:06Z","lastTransitionTime":"2026-01-22T14:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.935196 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.935238 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.935246 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.935262 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.935272 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:06Z","lastTransitionTime":"2026-01-22T14:20:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.946405 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 09:21:25.062440929 +0000 UTC Jan 22 14:20:06 crc kubenswrapper[4948]: I0122 14:20:06.957252 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:06 crc kubenswrapper[4948]: E0122 14:20:06.957451 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.037899 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.037966 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.037988 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.038042 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.038062 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:07Z","lastTransitionTime":"2026-01-22T14:20:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.140892 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.140945 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.140964 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.140989 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.141040 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:07Z","lastTransitionTime":"2026-01-22T14:20:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.243794 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.243847 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.243864 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.243887 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.243903 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:07Z","lastTransitionTime":"2026-01-22T14:20:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.345781 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.345850 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.345869 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.345895 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.345912 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:07Z","lastTransitionTime":"2026-01-22T14:20:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.448089 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.448138 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.448150 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.448169 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.448182 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:07Z","lastTransitionTime":"2026-01-22T14:20:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.550195 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.550254 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.550274 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.550299 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.550316 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:07Z","lastTransitionTime":"2026-01-22T14:20:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.652740 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.652795 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.652808 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.652830 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.652843 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:07Z","lastTransitionTime":"2026-01-22T14:20:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.755360 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.755412 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.755425 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.755446 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.755461 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:07Z","lastTransitionTime":"2026-01-22T14:20:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.859065 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.859139 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.859159 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.859193 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.859217 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:07Z","lastTransitionTime":"2026-01-22T14:20:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.946851 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 19:58:36.318795258 +0000 UTC Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.956125 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.956200 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:20:07 crc kubenswrapper[4948]: E0122 14:20:07.956269 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:20:07 crc kubenswrapper[4948]: E0122 14:20:07.956383 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.956397 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:07 crc kubenswrapper[4948]: E0122 14:20:07.956533 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.960870 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.960987 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.961053 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.961107 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:07 crc kubenswrapper[4948]: I0122 14:20:07.961144 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:07Z","lastTransitionTime":"2026-01-22T14:20:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.063765 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.063800 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.063809 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.063823 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.063832 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:08Z","lastTransitionTime":"2026-01-22T14:20:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.165518 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.165784 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.165853 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.165920 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.165979 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:08Z","lastTransitionTime":"2026-01-22T14:20:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.268891 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.268937 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.268945 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.268962 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.268972 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:08Z","lastTransitionTime":"2026-01-22T14:20:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.370787 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.370847 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.370859 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.370874 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.370884 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:08Z","lastTransitionTime":"2026-01-22T14:20:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.431735 4948 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jwlht_feadf51a-0802-4335-b035-fe290bdd3960/kube-multus/0.log" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.431794 4948 generic.go:334] "Generic (PLEG): container finished" podID="feadf51a-0802-4335-b035-fe290bdd3960" containerID="ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38" exitCode=1 Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.431826 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jwlht" event={"ID":"feadf51a-0802-4335-b035-fe290bdd3960","Type":"ContainerDied","Data":"ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38"} Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.432289 4948 scope.go:117] "RemoveContainer" containerID="ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.442934 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:08Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.457797 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:08Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.472974 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.473024 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.473192 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.473212 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.473221 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:08Z","lastTransitionTime":"2026-01-22T14:20:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.474400 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad48d2cda8a3700afaf5c50dbd1b380cc15c78e9bb88b42701297b7aefd5d5b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:08Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.488645 4948 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\
\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:08Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.498918 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:08Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.513494 4948 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:08Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:20:08Z\\\",\\\"message\\\":\\\"2026-01-22T14:19:23+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_0bae88fb-90be-4197-870d-ce3b5fd6f9f8\\\\n2026-01-22T14:19:23+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_0bae88fb-90be-4197-870d-ce3b5fd6f9f8 to /host/opt/cni/bin/\\\\n2026-01-22T14:19:23Z [verbose] multus-daemon started\\\\n2026-01-22T14:19:23Z [verbose] Readiness Indicator file check\\\\n2026-01-22T14:20:08Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:08Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.526620 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:08Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.538452 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:08Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.549819 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ece4aa-3874-4f6e-a88f-efcc2e745c10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://394b37292a9478d6e14f7cbb503ad5c350bec50720d2e6cfcb9fd2bf5640bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5175067f2a33555ac08c94572fbffc6c7b5dd06b33356c338a77492e859e5b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wxjm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:08Z is after 2025-08-24T17:21:41Z" Jan 22 
14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.561495 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fxccj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1280bb23-c283-4970-a78b-6de7854469ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:36Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fxccj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:08Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.575612 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.575646 4948 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.575655 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.575670 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.575679 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:08Z","lastTransitionTime":"2026-01-22T14:20:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.577386 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:08Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.596819 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1ddd883d66df032d348dd5c01bf01eec618e6ceab82f0b74d6411033d96419c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://724993b6da16942a70c289ef23b888e5c99acdb5e757981e26ff9cfb7ca074ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b08d2009af5c34e87963d42b418cb5bc2e4420bcae3ff157f76072a644659921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://174e918b3a7095dee796235feeb4d0ffffbfbba45bd3f647cb68a4980e921778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81d7b5a23cfc553e1a1c8a693b618499068a9bd28d8fde52df86091c290081e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a22632cd0bfb0bc2caa9fabdd5f4cbb6a7f4bb9c366fe06c7367d64584cb98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b68a112569f8d8a256713c4ca638606ac4fa5bbce2e3e8eb554ee755cbc98aa0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b68a112569f8d8a256713c4ca638606ac4fa5bbce2e3e8eb554ee755cbc98aa0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"message\\\":\\\" 6595 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0122 14:19:45.154182 6595 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 14:19:45.154211 6595 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 14:19:45.154218 6595 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 14:19:45.154283 6595 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 14:19:45.154312 6595 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 14:19:45.154317 6595 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 14:19:45.154331 6595 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0122 14:19:45.154391 6595 factory.go:656] Stopping watch factory\\\\nI0122 14:19:45.154409 6595 ovnkube.go:599] Stopped ovnkube\\\\nI0122 14:19:45.154452 6595 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 14:19:45.154468 6595 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 14:19:45.154478 6595 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0122 14:19:45.154488 6595 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0122 14:19:45.154515 6595 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0122 14:19:45.154519 6595 handler.go:208] Removed *v1.Node even\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-c5294_openshift-ovn-kubernetes(937749c1-3c7a-4f48-b65a-84b1335d64b6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67faca70b5ec9d02fe3cd9455cb1b95e9dce6fe764d354a3ce7b132ccfc315b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:08Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.611839 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82010fba-1711-4f4d-923f-f95d74fe5925\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39b1ca48b550f3c5c60b38f85e9238163219c4e67d9d64c61b667bcc6bee07a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8ab7b54d1a954fa832fdbf15ff09e53f86b34862b2f94f402a1a535a4a45dfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c
97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68e1e2293dd2e7926da9b30214fa8ec236684f533ecfa28da18e7e4853f7cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c172fc8d47e9ef3c270382104a7d16f1befec0f84d60dc31d862c5de8a417f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c172fc8d47e9ef3c270382104a7d16f1befec0f84d60dc31d862c5de8a417f9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:08Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.624143 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:08Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.634520 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:08Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.645534 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j4g7x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d0ef450-a908-4e0f-b551-de0724320540\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77e0f7c92ba0b75f07179e4619dbbf9270727b13bd14348dec038239fef65af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mzcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j4g7x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:08Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.658472 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:08Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.679161 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.679372 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.679512 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.679653 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.679789 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:08Z","lastTransitionTime":"2026-01-22T14:20:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.783488 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.783538 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.783557 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.783584 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.783604 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:08Z","lastTransitionTime":"2026-01-22T14:20:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.866321 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1280bb23-c283-4970-a78b-6de7854469ac-metrics-certs\") pod \"network-metrics-daemon-fxccj\" (UID: \"1280bb23-c283-4970-a78b-6de7854469ac\") " pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:08 crc kubenswrapper[4948]: E0122 14:20:08.866572 4948 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:20:08 crc kubenswrapper[4948]: E0122 14:20:08.866667 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1280bb23-c283-4970-a78b-6de7854469ac-metrics-certs podName:1280bb23-c283-4970-a78b-6de7854469ac nodeName:}" failed. No retries permitted until 2026-01-22 14:20:40.86663538 +0000 UTC m=+99.196308359 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1280bb23-c283-4970-a78b-6de7854469ac-metrics-certs") pod "network-metrics-daemon-fxccj" (UID: "1280bb23-c283-4970-a78b-6de7854469ac") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.886887 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.887299 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.887479 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.887651 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.887790 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:08Z","lastTransitionTime":"2026-01-22T14:20:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.947341 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 15:27:39.649431805 +0000 UTC Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.956765 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:08 crc kubenswrapper[4948]: E0122 14:20:08.956908 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.990717 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.990754 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.990763 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.990777 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:08 crc kubenswrapper[4948]: I0122 14:20:08.990786 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:08Z","lastTransitionTime":"2026-01-22T14:20:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.093932 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.093983 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.094000 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.094047 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.094058 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:09Z","lastTransitionTime":"2026-01-22T14:20:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.196913 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.196958 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.196972 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.196991 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.197004 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:09Z","lastTransitionTime":"2026-01-22T14:20:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.299444 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.299490 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.299502 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.299520 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.299533 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:09Z","lastTransitionTime":"2026-01-22T14:20:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.402595 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.402651 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.402664 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.402681 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.402694 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:09Z","lastTransitionTime":"2026-01-22T14:20:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.435641 4948 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jwlht_feadf51a-0802-4335-b035-fe290bdd3960/kube-multus/0.log" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.435701 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jwlht" event={"ID":"feadf51a-0802-4335-b035-fe290bdd3960","Type":"ContainerStarted","Data":"383602c28d40976a941bc8e2f079879a9e8b7d668b994058bd005455eaf3260b"} Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.452868 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:09Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.466757 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:09Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.483225 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:09Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.495082 4948 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://383602c28d40976a941bc8e2f079879a9e8b7d668b994058bd005455eaf3260b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:20:08Z\\\",\\\"message\\\":\\\"2026-01-22T14:19:23+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_0bae88fb-90be-4197-870d-ce3b5fd6f9f8\\\\n2026-01-22T14:19:23+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_0bae88fb-90be-4197-870d-ce3b5fd6f9f8 to /host/opt/cni/bin/\\\\n2026-01-22T14:19:23Z [verbose] multus-daemon started\\\\n2026-01-22T14:19:23Z [verbose] Readiness Indicator file check\\\\n2026-01-22T14:20:08Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:20:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:09Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.504988 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.505035 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.505048 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.505064 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.505075 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:09Z","lastTransitionTime":"2026-01-22T14:20:09Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.506501 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:09Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.527315 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1ddd883d66df032d348dd5c01bf01eec618e6ceab82f0b74d6411033d96419c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://724993b6da16942a70c289ef23b888e5c99acdb5e757981e26ff9cfb7ca074ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b08d2009af5c34e87963d42b418cb5bc2e4420bcae3ff157f76072a644659921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://174e918b3a7095dee796235feeb4d0ffffbfbba45bd3f647cb68a4980e921778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81d7b5a23cfc553e1a1c8a693b618499068a9bd28d8fde52df86091c290081e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a22632cd0bfb0bc2caa9fabdd5f4cbb6a7f4bb9c366fe06c7367d64584cb98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b68a112569f8d8a256713c4ca638606ac4fa5bbce2e3e8eb554ee755cbc98aa0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b68a112569f8d8a256713c4ca638606ac4fa5bbce2e3e8eb554ee755cbc98aa0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"message\\\":\\\" 6595 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0122 14:19:45.154182 6595 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 14:19:45.154211 6595 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 14:19:45.154218 6595 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 14:19:45.154283 6595 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 14:19:45.154312 6595 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 14:19:45.154317 6595 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 14:19:45.154331 6595 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0122 14:19:45.154391 6595 factory.go:656] Stopping watch factory\\\\nI0122 14:19:45.154409 6595 ovnkube.go:599] Stopped ovnkube\\\\nI0122 14:19:45.154452 6595 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 14:19:45.154468 6595 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 14:19:45.154478 6595 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0122 14:19:45.154488 6595 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0122 14:19:45.154515 6595 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0122 14:19:45.154519 6595 handler.go:208] Removed *v1.Node even\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-c5294_openshift-ovn-kubernetes(937749c1-3c7a-4f48-b65a-84b1335d64b6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67faca70b5ec9d02fe3cd9455cb1b95e9dce6fe764d354a3ce7b132ccfc315b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:09Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.543308 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ece4aa-3874-4f6e-a88f-efcc2e745c10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://394b37292a9478d6e14f7cbb503ad5c350bec50720d2e6cfcb9fd2bf5640bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5175067f2a33555ac08c94572fbffc6c7b5dd06b33356c338a77492e859e5b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wxjm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:09Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.553252 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fxccj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1280bb23-c283-4970-a78b-6de7854469ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:36Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fxccj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:09Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.565861 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:09Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.575176 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82010fba-1711-4f4d-923f-f95d74fe5925\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39b1ca48b550f3c5c60b38f85e9238163219c4e67d9d64c61b667bcc6bee07a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8ab7b54d1a954fa832fdbf15ff09e53f86b34862b2f94f402a1a535a4a45dfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68e1e2293dd2e7926da9b30214fa8ec236684f533ecfa28da18e7e4853f7cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c172fc8d47e9ef3c270382104a7d16f1befec0f84d60dc31d862c5de8a417f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c172fc8d47e9ef3c270382104a7d16f1befec0f84d60dc31d862c5de8a417f9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:09Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.588774 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:09Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.599967 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:09Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.606883 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.606928 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.606943 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.606967 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.606986 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:09Z","lastTransitionTime":"2026-01-22T14:20:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.611031 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j4g7x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d0ef450-a908-4e0f-b551-de0724320540\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77e0f7c92ba0b75f07179e4619dbbf9270727b13bd14348dec038239fef65af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mzcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j4g7x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:09Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.629924 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:09Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.641446 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:09Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.653907 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:09Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.669058 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad48d2cda8a3700afaf5c50dbd1b380cc15c78e9bb88b42701297b7aefd5d5b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:09Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.709044 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.709083 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.709092 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.709108 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.709118 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:09Z","lastTransitionTime":"2026-01-22T14:20:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.811474 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.811518 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.811529 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.811547 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.811558 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:09Z","lastTransitionTime":"2026-01-22T14:20:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.914004 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.914057 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.914069 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.914084 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.914093 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:09Z","lastTransitionTime":"2026-01-22T14:20:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.948482 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 05:39:50.496669532 +0000 UTC Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.956101 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:20:09 crc kubenswrapper[4948]: E0122 14:20:09.956211 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.956111 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:20:09 crc kubenswrapper[4948]: E0122 14:20:09.956369 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:20:09 crc kubenswrapper[4948]: I0122 14:20:09.956283 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:09 crc kubenswrapper[4948]: E0122 14:20:09.956458 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.016228 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.016265 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.016277 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.016293 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.016303 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:10Z","lastTransitionTime":"2026-01-22T14:20:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.118467 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.118553 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.118569 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.118590 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.118604 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:10Z","lastTransitionTime":"2026-01-22T14:20:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.220808 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.220844 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.220854 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.220869 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.220881 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:10Z","lastTransitionTime":"2026-01-22T14:20:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.323121 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.323181 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.323199 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.323224 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.323241 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:10Z","lastTransitionTime":"2026-01-22T14:20:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.425953 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.425993 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.426003 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.426037 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.426049 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:10Z","lastTransitionTime":"2026-01-22T14:20:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.529123 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.529176 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.529190 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.529210 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.529255 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:10Z","lastTransitionTime":"2026-01-22T14:20:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.632277 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.632325 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.632337 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.632357 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.632538 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:10Z","lastTransitionTime":"2026-01-22T14:20:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.734352 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.734379 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.734387 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.734402 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.734410 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:10Z","lastTransitionTime":"2026-01-22T14:20:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.837609 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.837638 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.837649 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.837665 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.837676 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:10Z","lastTransitionTime":"2026-01-22T14:20:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.940134 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.940169 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.940188 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.940205 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.940218 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:10Z","lastTransitionTime":"2026-01-22T14:20:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.949248 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 02:26:19.982259613 +0000 UTC Jan 22 14:20:10 crc kubenswrapper[4948]: I0122 14:20:10.956574 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:10 crc kubenswrapper[4948]: E0122 14:20:10.956714 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.042526 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.042570 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.042582 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.042599 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.042610 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:11Z","lastTransitionTime":"2026-01-22T14:20:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.144875 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.144911 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.144922 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.144936 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.144945 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:11Z","lastTransitionTime":"2026-01-22T14:20:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.247418 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.247458 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.247471 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.247521 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.247534 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:11Z","lastTransitionTime":"2026-01-22T14:20:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.350379 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.350422 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.350435 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.350454 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.350466 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:11Z","lastTransitionTime":"2026-01-22T14:20:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.452268 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.452328 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.452344 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.452366 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.452383 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:11Z","lastTransitionTime":"2026-01-22T14:20:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.554808 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.554846 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.554855 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.554870 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.554880 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:11Z","lastTransitionTime":"2026-01-22T14:20:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.656980 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.657041 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.657056 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.657072 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.657084 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:11Z","lastTransitionTime":"2026-01-22T14:20:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.759270 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.759306 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.759315 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.759332 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.759342 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:11Z","lastTransitionTime":"2026-01-22T14:20:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.862141 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.862191 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.862200 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.862215 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.862224 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:11Z","lastTransitionTime":"2026-01-22T14:20:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.950397 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 15:03:23.789788229 +0000 UTC Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.956832 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.956864 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:11 crc kubenswrapper[4948]: E0122 14:20:11.956948 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.956864 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:20:11 crc kubenswrapper[4948]: E0122 14:20:11.957059 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:20:11 crc kubenswrapper[4948]: E0122 14:20:11.957136 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.965232 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.965261 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.965272 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.965288 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.965298 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:11Z","lastTransitionTime":"2026-01-22T14:20:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.969657 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet 
valid: current time 2026-01-22T14:20:11Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.987415 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1ddd883d66df032d348dd5c01bf01eec618e6ceab82f0b74d6411033d96419c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://724993b6da16942a70c289ef23b888e5c99acdb5e757981e26ff9cfb7ca074ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"
Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b08d2009af5c34e87963d42b418cb5bc2e4420bcae3ff157f76072a644659921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://174e918b3a7095dee796235feeb4d0ffffbfbba45bd3f647cb68a4980e921778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81d7b5a23cfc553e1a1c8a693b618499068a9bd28d8fde52df86091c290081e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a22632cd0bfb0bc2caa9fabdd5f4cbb6a7f4bb9c366fe06c7367d64584cb98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa4
1ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b68a112569f8d8a256713c4ca638606ac4fa5bbce2e3e8eb554ee755cbc98aa0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b68a112569f8d8a256713c4ca638606ac4fa5bbce2e3e8eb554ee755cbc98aa0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"message\\\":\\\" 6595 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0122 14:19:45.154182 6595 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 14:19:45.154211 6595 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 14:19:45.154218 6595 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 14:19:45.154283 6595 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 14:19:45.154312 6595 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 14:19:45.154317 6595 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 14:19:45.154331 6595 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0122 14:19:45.154391 6595 factory.go:656] Stopping watch factory\\\\nI0122 14:19:45.154409 6595 ovnkube.go:599] Stopped ovnkube\\\\nI0122 14:19:45.154452 6595 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 14:19:45.154468 6595 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 14:19:45.154478 6595 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0122 14:19:45.154488 6595 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0122 14:19:45.154515 6595 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0122 14:19:45.154519 6595 handler.go:208] Removed *v1.Node even\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-c5294_openshift-ovn-kubernetes(937749c1-3c7a-4f48-b65a-84b1335d64b6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67faca70b5ec9d02fe3cd9455cb1b95e9dce6fe764d354a3ce7b132ccfc315b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:11Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:11 crc kubenswrapper[4948]: I0122 14:20:11.996742 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ece4aa-3874-4f6e-a88f-efcc2e745c10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://394b37292a9478d6e14f7cbb503ad5c350bec50720d2e6cfcb9fd2bf5640bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5175067f2a33555ac08c94572fbffc6c7b5dd06b33356c338a77492e859e5b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wxjm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:11Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.011760 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fxccj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1280bb23-c283-4970-a78b-6de7854469ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:36Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fxccj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:12Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.023761 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:12Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.033964 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82010fba-1711-4f4d-923f-f95d74fe5925\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39b1ca48b550f3c5c60b38f85e9238163219c4e67d9d64c61b667bcc6bee07a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8ab7b54d1a954fa832fdbf15ff09e53f86b34862b2f94f402a1a535a4a45dfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68e1e2293dd2e7926da9b30214fa8ec236684f533ecfa28da18e7e4853f7cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c172fc8d47e9ef3c270382104a7d16f1befec0f84d60dc31d862c5de8a417f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c172fc8d47e9ef3c270382104a7d16f1befec0f84d60dc31d862c5de8a417f9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:12Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.044105 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:12Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.052405 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:12Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.061940 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j4g7x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d0ef450-a908-4e0f-b551-de0724320540\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77e0f7c92ba0b75f07179e4619dbbf9270727b13bd14348dec038239fef65af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mzcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j4g7x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:12Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.069029 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.069073 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.069087 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.069103 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.069115 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:12Z","lastTransitionTime":"2026-01-22T14:20:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.075850 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volu
meMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:12Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.086600 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:12Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.099759 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:12Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.117498 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad48d2cda8a3700afaf5c50dbd1b380cc15c78e9bb88b42701297b7aefd5d5b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:12Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.128647 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-conf
ig\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:12Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.137801 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:12Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.147899 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:12Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.159191 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://383602c28d40976a941bc8e2f079879a9e8b7d668b994058bd005455eaf3260b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:20:08Z\\\",\\\"message\\\":\\\"2026-01-22T14:19:23+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_0bae88fb-90be-4197-870d-ce3b5fd6f9f8\\\\n2026-01-22T14:19:23+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_0bae88fb-90be-4197-870d-ce3b5fd6f9f8 to /host/opt/cni/bin/\\\\n2026-01-22T14:19:23Z [verbose] multus-daemon started\\\\n2026-01-22T14:19:23Z [verbose] Readiness Indicator file check\\\\n2026-01-22T14:20:08Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:20:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:12Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.171796 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.171822 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.171832 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.171868 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.171878 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:12Z","lastTransitionTime":"2026-01-22T14:20:12Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.273763 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.273809 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.273824 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.273844 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.273859 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:12Z","lastTransitionTime":"2026-01-22T14:20:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.375910 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.375940 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.375952 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.375968 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.375979 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:12Z","lastTransitionTime":"2026-01-22T14:20:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.482615 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.482656 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.482666 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.482679 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.482696 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:12Z","lastTransitionTime":"2026-01-22T14:20:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.584698 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.584730 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.584738 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.584751 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.584760 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:12Z","lastTransitionTime":"2026-01-22T14:20:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.687172 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.687420 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.687490 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.687579 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.687637 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:12Z","lastTransitionTime":"2026-01-22T14:20:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.790247 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.790284 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.790292 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.790305 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.790313 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:12Z","lastTransitionTime":"2026-01-22T14:20:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.892168 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.892214 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.892227 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.892243 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.892255 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:12Z","lastTransitionTime":"2026-01-22T14:20:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.951504 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 14:33:02.977589172 +0000 UTC Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.956106 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:12 crc kubenswrapper[4948]: E0122 14:20:12.956248 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.994646 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.994683 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.994691 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.994704 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:12 crc kubenswrapper[4948]: I0122 14:20:12.994714 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:12Z","lastTransitionTime":"2026-01-22T14:20:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.102204 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.102412 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.102524 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.102716 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.102865 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:13Z","lastTransitionTime":"2026-01-22T14:20:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.206096 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.206155 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.206166 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.206185 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.206196 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:13Z","lastTransitionTime":"2026-01-22T14:20:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.308902 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.309178 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.309252 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.309346 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.309442 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:13Z","lastTransitionTime":"2026-01-22T14:20:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.411486 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.411535 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.411547 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.411566 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.411579 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:13Z","lastTransitionTime":"2026-01-22T14:20:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.513370 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.513419 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.513433 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.513449 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.513460 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:13Z","lastTransitionTime":"2026-01-22T14:20:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.615930 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.615988 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.616002 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.616040 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.616053 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:13Z","lastTransitionTime":"2026-01-22T14:20:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.719147 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.719205 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.719221 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.719243 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.719257 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:13Z","lastTransitionTime":"2026-01-22T14:20:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.821293 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.821338 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.821348 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.821364 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.821374 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:13Z","lastTransitionTime":"2026-01-22T14:20:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.925177 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.925249 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.925268 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.925290 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.925304 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:13Z","lastTransitionTime":"2026-01-22T14:20:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.952449 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 08:44:33.184816923 +0000 UTC Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.957403 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.957443 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:20:13 crc kubenswrapper[4948]: I0122 14:20:13.957531 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:13 crc kubenswrapper[4948]: E0122 14:20:13.957639 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:20:13 crc kubenswrapper[4948]: E0122 14:20:13.957960 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:20:13 crc kubenswrapper[4948]: E0122 14:20:13.958138 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.027679 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.027735 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.027754 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.027779 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.027797 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:14Z","lastTransitionTime":"2026-01-22T14:20:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.129579 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.129612 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.129620 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.129632 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.129641 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:14Z","lastTransitionTime":"2026-01-22T14:20:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.232124 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.232168 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.232181 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.232197 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.232209 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:14Z","lastTransitionTime":"2026-01-22T14:20:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.334577 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.334610 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.334618 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.334631 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.334639 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:14Z","lastTransitionTime":"2026-01-22T14:20:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.437079 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.437120 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.437131 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.437147 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.437159 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:14Z","lastTransitionTime":"2026-01-22T14:20:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.540287 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.540449 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.540473 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.540503 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.540526 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:14Z","lastTransitionTime":"2026-01-22T14:20:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.642384 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.642436 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.642451 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.642468 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.642479 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:14Z","lastTransitionTime":"2026-01-22T14:20:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.745241 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.745286 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.745297 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.745313 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.745327 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:14Z","lastTransitionTime":"2026-01-22T14:20:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.848709 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.848765 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.848788 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.848807 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.848818 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:14Z","lastTransitionTime":"2026-01-22T14:20:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.951408 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.951458 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.951473 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.951551 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.951608 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:14Z","lastTransitionTime":"2026-01-22T14:20:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.952637 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 10:41:33.364084688 +0000 UTC Jan 22 14:20:14 crc kubenswrapper[4948]: I0122 14:20:14.956949 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:14 crc kubenswrapper[4948]: E0122 14:20:14.957116 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.054706 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.055083 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.055174 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.055262 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.055357 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:15Z","lastTransitionTime":"2026-01-22T14:20:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.157292 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.157528 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.157605 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.157697 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.157765 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:15Z","lastTransitionTime":"2026-01-22T14:20:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.260272 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.260331 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.260348 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.260373 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.260390 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:15Z","lastTransitionTime":"2026-01-22T14:20:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.362392 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.362476 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.362499 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.362533 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.362556 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:15Z","lastTransitionTime":"2026-01-22T14:20:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.465273 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.465529 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.465765 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.466081 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.466311 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:15Z","lastTransitionTime":"2026-01-22T14:20:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.569162 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.569438 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.569525 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.569602 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.569679 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:15Z","lastTransitionTime":"2026-01-22T14:20:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.672383 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.672423 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.672433 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.672447 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.672457 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:15Z","lastTransitionTime":"2026-01-22T14:20:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.774989 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.775074 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.775094 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.775115 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.775127 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:15Z","lastTransitionTime":"2026-01-22T14:20:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.878374 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.878445 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.878460 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.878491 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.878509 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:15Z","lastTransitionTime":"2026-01-22T14:20:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.953351 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 08:09:28.920094957 +0000 UTC Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.956796 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.956828 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:20:15 crc kubenswrapper[4948]: E0122 14:20:15.956932 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.956947 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:15 crc kubenswrapper[4948]: E0122 14:20:15.957278 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:20:15 crc kubenswrapper[4948]: E0122 14:20:15.957405 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.957719 4948 scope.go:117] "RemoveContainer" containerID="b68a112569f8d8a256713c4ca638606ac4fa5bbce2e3e8eb554ee755cbc98aa0" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.982051 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.982089 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.982098 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.982112 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:15 crc kubenswrapper[4948]: I0122 14:20:15.982122 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:15Z","lastTransitionTime":"2026-01-22T14:20:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.084209 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.084251 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.084260 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.084274 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.084285 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:16Z","lastTransitionTime":"2026-01-22T14:20:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.175471 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.175520 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.175535 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.175551 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.175562 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:16Z","lastTransitionTime":"2026-01-22T14:20:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:16 crc kubenswrapper[4948]: E0122 14:20:16.186755 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:16Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.189993 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.190057 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.190067 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.190083 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.190111 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:16Z","lastTransitionTime":"2026-01-22T14:20:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:16 crc kubenswrapper[4948]: E0122 14:20:16.201243 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:16Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.204259 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.204291 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.204300 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.204315 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.204325 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:16Z","lastTransitionTime":"2026-01-22T14:20:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:16 crc kubenswrapper[4948]: E0122 14:20:16.218260 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:16Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.222522 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.222572 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.222590 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.222611 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.222628 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:16Z","lastTransitionTime":"2026-01-22T14:20:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:16 crc kubenswrapper[4948]: E0122 14:20:16.238804 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:16Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.243967 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.244028 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.244042 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.244061 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.244073 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:16Z","lastTransitionTime":"2026-01-22T14:20:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:16 crc kubenswrapper[4948]: E0122 14:20:16.256910 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:16Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:16Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:16 crc kubenswrapper[4948]: E0122 14:20:16.257082 4948 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.259110 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.259166 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.259180 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.259204 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.259217 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:16Z","lastTransitionTime":"2026-01-22T14:20:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.361512 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.361557 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.361569 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.361585 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.361598 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:16Z","lastTransitionTime":"2026-01-22T14:20:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.456140 4948 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5294_937749c1-3c7a-4f48-b65a-84b1335d64b6/ovnkube-controller/2.log" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.459139 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" event={"ID":"937749c1-3c7a-4f48-b65a-84b1335d64b6","Type":"ContainerStarted","Data":"39986e0fef791ac8fb51e96c9f7bac60fcf8c3672361aad41521f3aece660aa1"} Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.460065 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.464627 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.464653 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.464662 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.464678 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.464688 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:16Z","lastTransitionTime":"2026-01-22T14:20:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.482127 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1ddd883d66df032d348dd5c01bf01eec618e6ceab82f0b74d6411033d96419c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://724993b6da16942a70c289ef23b888e5c99acdb5e757981e26ff9cfb7ca074ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://b08d2009af5c34e87963d42b418cb5bc2e4420bcae3ff157f76072a644659921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://174e918b3a7095dee796235feeb4d0ffffbfbba45bd3f647cb68a4980e921778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81d7b5a23cfc553e1a1c8a693b618499068a9bd28d8fde52df86091c290081e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a22632cd0bfb0bc2caa9fabdd5f4cbb6a7f4bb9c366fe06c7367d64584cb98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39986e0fef791ac8fb51e96c9f7bac60fcf8c3672361aad41521f3aece660aa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b68a112569f8d8a256713c4ca638606ac4fa5bbce2e3e8eb554ee755cbc98aa0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"message\\\":\\\" 6595 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0122 14:19:45.154182 6595 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 14:19:45.154211 6595 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 14:19:45.154218 6595 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 14:19:45.154283 6595 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 14:19:45.154312 6595 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 14:19:45.154317 6595 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 14:19:45.154331 6595 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0122 14:19:45.154391 6595 factory.go:656] Stopping watch factory\\\\nI0122 14:19:45.154409 6595 ovnkube.go:599] Stopped ovnkube\\\\nI0122 14:19:45.154452 6595 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 14:19:45.154468 6595 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 14:19:45.154478 6595 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0122 14:19:45.154488 6595 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0122 14:19:45.154515 6595 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0122 14:19:45.154519 6595 handler.go:208] Removed *v1.Node 
even\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67faca70b5ec9d02fe3cd9455cb1b95e9dce6fe764d354a3ce7b132ccfc315b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"c
ontainerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:16Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.497121 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ece4aa-3874-4f6e-a88f-efcc2e745c10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://394b37292a9478d6e14f7cbb503ad5c350bec50720d2e6cfcb9fd2bf5640bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5175067f2a33555ac08c94572fbffc6c7b5dd06b33356c338a77492e859e5b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wxjm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:16Z is after 2025-08-24T17:21:41Z" Jan 22 
14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.507101 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fxccj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1280bb23-c283-4970-a78b-6de7854469ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:36Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fxccj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:16Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.525601 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:16Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.542428 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:16Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.561323 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82010fba-1711-4f4d-923f-f95d74fe5925\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39b1ca48b550f3c5c60b38f85e9238163219c4e67d9d64c61b667bcc6bee07a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8ab7b54d1a954fa832fdbf15ff09e53f86b34862b2f94f402a1a535a4a45dfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68e1e2293dd2e7926da9b30214fa8ec236684f533ecfa28da18e7e4853f7cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c172fc8d47e9ef3c270382104a7d16f1befec0f84d60dc31d862c5de8a417f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c172fc8d47e9ef3c270382104a7d16f1befec0f84d60dc31d862c5de8a417f9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:16Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.569531 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.570005 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.570113 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.570181 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 
14:20:16.570238 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:16Z","lastTransitionTime":"2026-01-22T14:20:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.580787 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:16Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.597824 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:16Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.620866 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j4g7x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d0ef450-a908-4e0f-b551-de0724320540\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77e0f7c92ba0b75f07179e4619dbbf9270727b13bd14348dec038239fef65af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mzcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j4g7x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:16Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.645562 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:16Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.662086 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:16Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.672265 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.672471 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.672555 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.672637 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.672709 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:16Z","lastTransitionTime":"2026-01-22T14:20:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.676673 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:16Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.693671 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad48d2cda8a3700afaf5c50dbd1b380cc15c78e9bb88b42701297b7aefd5d5b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:16Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.709197 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:16Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.722001 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:16Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.737302 4948 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://383602c28d40976a941bc8e2f079879a9e8b7d668b994058bd005455eaf3260b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:20:08Z\\\",\\\"message\\\":\\\"2026-01-22T14:19:23+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_0bae88fb-90be-4197-870d-ce3b5fd6f9f8\\\\n2026-01-22T14:19:23+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_0bae88fb-90be-4197-870d-ce3b5fd6f9f8 to /host/opt/cni/bin/\\\\n2026-01-22T14:19:23Z [verbose] multus-daemon started\\\\n2026-01-22T14:19:23Z [verbose] Readiness Indicator file check\\\\n2026-01-22T14:20:08Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:20:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:16Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.752650 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:16Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.776181 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.776240 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.776259 4948 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.776286 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.776303 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:16Z","lastTransitionTime":"2026-01-22T14:20:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.879817 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.879864 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.879877 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.879896 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.879909 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:16Z","lastTransitionTime":"2026-01-22T14:20:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.954120 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 21:26:37.047315774 +0000 UTC Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.956716 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:16 crc kubenswrapper[4948]: E0122 14:20:16.956912 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.984710 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.984757 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.984772 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.984790 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:16 crc kubenswrapper[4948]: I0122 14:20:16.984803 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:16Z","lastTransitionTime":"2026-01-22T14:20:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.088670 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.088726 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.088742 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.088762 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.088779 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:17Z","lastTransitionTime":"2026-01-22T14:20:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.192491 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.192585 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.192600 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.192622 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.192641 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:17Z","lastTransitionTime":"2026-01-22T14:20:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.296534 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.296586 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.296601 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.296622 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.296639 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:17Z","lastTransitionTime":"2026-01-22T14:20:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.399707 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.399765 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.399784 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.399811 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.399834 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:17Z","lastTransitionTime":"2026-01-22T14:20:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.465628 4948 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5294_937749c1-3c7a-4f48-b65a-84b1335d64b6/ovnkube-controller/3.log" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.466537 4948 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5294_937749c1-3c7a-4f48-b65a-84b1335d64b6/ovnkube-controller/2.log" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.471259 4948 generic.go:334] "Generic (PLEG): container finished" podID="937749c1-3c7a-4f48-b65a-84b1335d64b6" containerID="39986e0fef791ac8fb51e96c9f7bac60fcf8c3672361aad41521f3aece660aa1" exitCode=1 Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.471313 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" event={"ID":"937749c1-3c7a-4f48-b65a-84b1335d64b6","Type":"ContainerDied","Data":"39986e0fef791ac8fb51e96c9f7bac60fcf8c3672361aad41521f3aece660aa1"} Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.471363 4948 scope.go:117] "RemoveContainer" containerID="b68a112569f8d8a256713c4ca638606ac4fa5bbce2e3e8eb554ee755cbc98aa0" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.476346 4948 scope.go:117] "RemoveContainer" containerID="39986e0fef791ac8fb51e96c9f7bac60fcf8c3672361aad41521f3aece660aa1" Jan 22 14:20:17 crc kubenswrapper[4948]: E0122 14:20:17.477632 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-c5294_openshift-ovn-kubernetes(937749c1-3c7a-4f48-b65a-84b1335d64b6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" podUID="937749c1-3c7a-4f48-b65a-84b1335d64b6" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.490742 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:17Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.502143 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.502182 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.502194 4948 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.502212 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.502225 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:17Z","lastTransitionTime":"2026-01-22T14:20:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.507913 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:17Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.522684 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:17Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.537194 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://383602c28d40976a941bc8e2f079879a9e8b7d668b994058bd005455eaf3260b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:20:08Z\\\",\\\"message\\\":\\\"2026-01-22T14:19:23+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_0bae88fb-90be-4197-870d-ce3b5fd6f9f8\\\\n2026-01-22T14:19:23+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_0bae88fb-90be-4197-870d-ce3b5fd6f9f8 to /host/opt/cni/bin/\\\\n2026-01-22T14:19:23Z [verbose] multus-daemon started\\\\n2026-01-22T14:19:23Z [verbose] Readiness Indicator file check\\\\n2026-01-22T14:20:08Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:20:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:17Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.551470 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:17Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.577556 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1ddd883d66df032d348dd5c01bf01eec618e6ceab82f0b74d6411033d96419c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://724993b6da16942a70c289ef23b888e5c99acdb5e757981e26ff9cfb7ca074ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b08d2009af5c34e87963d42b418cb5bc2e4420bcae3ff157f76072a644659921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://174e918b3a7095dee796235feeb4d0ffffbfbba45bd3f647cb68a4980e921778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81d7b5a23cfc553e1a1c8a693b618499068a9bd28d8fde52df86091c290081e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a22632cd0bfb0bc2caa9fabdd5f4cbb6a7f4bb9c366fe06c7367d64584cb98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39986e0fef791ac8fb51e96c9f7bac60fcf8c367
2361aad41521f3aece660aa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b68a112569f8d8a256713c4ca638606ac4fa5bbce2e3e8eb554ee755cbc98aa0\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:19:45Z\\\",\\\"message\\\":\\\" 6595 handler.go:208] Removed *v1.Pod event handler 6\\\\nI0122 14:19:45.154182 6595 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0122 14:19:45.154211 6595 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 14:19:45.154218 6595 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 14:19:45.154283 6595 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 14:19:45.154312 6595 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0122 14:19:45.154317 6595 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0122 14:19:45.154331 6595 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0122 14:19:45.154391 6595 factory.go:656] Stopping watch factory\\\\nI0122 14:19:45.154409 6595 ovnkube.go:599] Stopped ovnkube\\\\nI0122 14:19:45.154452 6595 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0122 14:19:45.154468 6595 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0122 14:19:45.154478 6595 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0122 14:19:45.154488 6595 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0122 14:19:45.154515 6595 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0122 14:19:45.154519 6595 handler.go:208] Removed *v1.Node even\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:44Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39986e0fef791ac8fb51e96c9f7bac60fcf8c3672361aad41521f3aece660aa1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:20:17Z\\\",\\\"message\\\":\\\":default/a8519615025667110816) with []\\\\nI0122 14:20:17.045126 7013 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0122 14:20:17.045218 7013 factory.go:1336] Added *v1.Node event handler 7\\\\nI0122 14:20:17.045270 7013 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0122 14:20:17.045294 7013 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 14:20:17.045332 7013 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 14:20:17.045349 7013 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 14:20:17.045365 7013 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 14:20:17.045380 7013 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 14:20:17.045406 7013 factory.go:656] Stopping watch factory\\\\nI0122 14:20:17.045437 7013 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0122 14:20:17.045763 7013 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0122 14:20:17.045861 7013 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0122 14:20:17.045904 7013 
ovnkube.go:599] Stopped ovnkube\\\\nI0122 14:20:17.045926 7013 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0122 14:20:17.046001 7013 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:20:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67faca70b5ec9d02fe3cd9455cb1b95e9dce6fe764d354a3ce7b132ccfc315b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"co
ntainerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:17Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.590111 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ece4aa-3874-4f6e-a88f-efcc2e745c10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://394b37292a9478d6e14f7cbb503ad5c350bec50720d2e6cfcb9fd2bf5640bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5175067f2a33555ac08c94572fbffc6c7b5dd06b33356c338a77492e859e5b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wxjm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:17Z is after 2025-08-24T17:21:41Z" Jan 22 
14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.601370 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fxccj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1280bb23-c283-4970-a78b-6de7854469ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:36Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fxccj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:17Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.604708 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.604749 4948 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.604760 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.604784 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.604796 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:17Z","lastTransitionTime":"2026-01-22T14:20:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.615383 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:17Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.625935 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"82010fba-1711-4f4d-923f-f95d74fe5925\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39b1ca48b550f3c5c60b38f85e9238163219c4e67d9d64c61b667bcc6bee07a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8ab7b54d1a954fa832fdbf15ff09e53f86b34862b2f94f402a1a535a4a45dfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68e1e2293dd2e7926da9b30214fa8ec236684f533ecfa28da18e7e4853f7cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c172fc8d47e9ef3c270382104a7d16f1befec0f84d60dc31d862c5de8a417f9d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c172fc8d47e9ef3c270382104a7d16f1befec0f84d60dc31d862c5de8a417f9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:17Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.636996 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:17Z is after 2025-08-24T17:21:41Z" Jan 22 
14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.646691 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:17Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.659090 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j4g7x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d0ef450-a908-4e0f-b551-de0724320540\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77e0f7c92ba0b75f07179e4619dbbf9270727b13bd14348dec038239fef65af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mzcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j4g7x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:17Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.671802 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:17Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.683532 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:17Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.694501 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:17Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.707805 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad48d2cda8a3700afaf5c50dbd1b380cc15c78e9bb88b42701297b7aefd5d5b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:17Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.708918 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.708985 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.709003 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.709060 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.709079 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:17Z","lastTransitionTime":"2026-01-22T14:20:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.811745 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.811818 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.811841 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.811870 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.811893 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:17Z","lastTransitionTime":"2026-01-22T14:20:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.916003 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.916068 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.916077 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.916094 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.916120 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:17Z","lastTransitionTime":"2026-01-22T14:20:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.954510 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 06:35:36.582020302 +0000 UTC Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.957208 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:20:17 crc kubenswrapper[4948]: E0122 14:20:17.957323 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.957213 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:17 crc kubenswrapper[4948]: E0122 14:20:17.957395 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:20:17 crc kubenswrapper[4948]: I0122 14:20:17.957216 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:20:17 crc kubenswrapper[4948]: E0122 14:20:17.957459 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.018466 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.018517 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.018534 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.018557 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.018573 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:18Z","lastTransitionTime":"2026-01-22T14:20:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.122058 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.122118 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.122131 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.122147 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.122159 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:18Z","lastTransitionTime":"2026-01-22T14:20:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.224714 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.224799 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.224818 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.224844 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.224863 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:18Z","lastTransitionTime":"2026-01-22T14:20:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.327297 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.327343 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.327352 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.327399 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.327409 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:18Z","lastTransitionTime":"2026-01-22T14:20:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.430843 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.430907 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.430922 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.430943 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.430957 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:18Z","lastTransitionTime":"2026-01-22T14:20:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.479444 4948 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5294_937749c1-3c7a-4f48-b65a-84b1335d64b6/ovnkube-controller/3.log" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.483344 4948 scope.go:117] "RemoveContainer" containerID="39986e0fef791ac8fb51e96c9f7bac60fcf8c3672361aad41521f3aece660aa1" Jan 22 14:20:18 crc kubenswrapper[4948]: E0122 14:20:18.483484 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-c5294_openshift-ovn-kubernetes(937749c1-3c7a-4f48-b65a-84b1335d64b6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" podUID="937749c1-3c7a-4f48-b65a-84b1335d64b6" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.498165 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identi
ty-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.510906 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.522683 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\
\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.533232 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.533301 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.533312 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.533329 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.533339 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:18Z","lastTransitionTime":"2026-01-22T14:20:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.538882 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://383602c28d40976a941bc8e2f079879a9e8b7d668b994058bd005455eaf3260b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:20:08Z\\\",\\\"message\\\":\\\"2026-01-22T14:19:23+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_0bae88fb-90be-4197-870d-ce3b5fd6f9f8\\\\n2026-01-22T14:19:23+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_0bae88fb-90be-4197-870d-ce3b5fd6f9f8 to /host/opt/cni/bin/\\\\n2026-01-22T14:19:23Z [verbose] multus-daemon started\\\\n2026-01-22T14:19:23Z [verbose] Readiness Indicator file check\\\\n2026-01-22T14:20:08Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:20:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.554549 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.573146 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1ddd883d66df032d348dd5c01bf01eec618e6ceab82f0b74d6411033d96419c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://724993b6da16942a70c289ef23b888e5c99acdb5e757981e26ff9cfb7ca074ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b08d2009af5c34e87963d42b418cb5bc2e4420bcae3ff157f76072a644659921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://174e918b3a7095dee796235feeb4d0ffffbfbba45bd3f647cb68a4980e921778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81d7b5a23cfc553e1a1c8a693b618499068a9bd28d8fde52df86091c290081e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a22632cd0bfb0bc2caa9fabdd5f4cbb6a7f4bb9c366fe06c7367d64584cb98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39986e0fef791ac8fb51e96c9f7bac60fcf8c367
2361aad41521f3aece660aa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39986e0fef791ac8fb51e96c9f7bac60fcf8c3672361aad41521f3aece660aa1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:20:17Z\\\",\\\"message\\\":\\\":default/a8519615025667110816) with []\\\\nI0122 14:20:17.045126 7013 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0122 14:20:17.045218 7013 factory.go:1336] Added *v1.Node event handler 7\\\\nI0122 14:20:17.045270 7013 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0122 14:20:17.045294 7013 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 14:20:17.045332 7013 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 14:20:17.045349 7013 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 14:20:17.045365 7013 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 14:20:17.045380 7013 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 14:20:17.045406 7013 factory.go:656] Stopping watch factory\\\\nI0122 14:20:17.045437 7013 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0122 14:20:17.045763 7013 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0122 14:20:17.045861 7013 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0122 14:20:17.045904 7013 ovnkube.go:599] Stopped ovnkube\\\\nI0122 14:20:17.045926 7013 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0122 14:20:17.046001 7013 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:20:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-c5294_openshift-ovn-kubernetes(937749c1-3c7a-4f48-b65a-84b1335d64b6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67faca70b5ec9d02fe3cd9455cb1b95e9dce6fe764d354a3ce7b132ccfc315b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.585917 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ece4aa-3874-4f6e-a88f-efcc2e745c10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://394b37292a9478d6e14f7cbb503ad5c350bec50720d2e6cfcb9fd2bf5640bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5175067f2a33555ac08c94572fbffc6c7b5dd06b33356c338a77492e859e5b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wxjm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.603170 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fxccj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1280bb23-c283-4970-a78b-6de7854469ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:36Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fxccj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.617495 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.629303 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82010fba-1711-4f4d-923f-f95d74fe5925\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39b1ca48b550f3c5c60b38f85e9238163219c4e67d9d64c61b667bcc6bee07a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8ab7b54d1a954fa832fdbf15ff09e53f86b34862b2f94f402a1a535a4a45dfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68e1e2293dd2e7926da9b30214fa8ec236684f533ecfa28da18e7e4853f7cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c172fc8d47e9ef3c270382104a7d16f1befec0f84d60dc31d862c5de8a417f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c172fc8d47e9ef3c270382104a7d16f1befec0f84d60dc31d862c5de8a417f9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.635745 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.635815 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.635826 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.635841 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 
14:20:18.636143 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:18Z","lastTransitionTime":"2026-01-22T14:20:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.642251 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.653831 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.664257 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j4g7x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d0ef450-a908-4e0f-b551-de0724320540\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77e0f7c92ba0b75f07179e4619dbbf9270727b13bd14348dec038239fef65af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mzcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j4g7x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.680987 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.695709 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.708751 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.725059 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad48d2cda8a3700afaf5c50dbd1b380cc15c78e9bb88b42701297b7aefd5d5b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:18Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.739290 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.739349 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.739364 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.739385 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.739400 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:18Z","lastTransitionTime":"2026-01-22T14:20:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.843065 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.843114 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.843127 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.843149 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.843162 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:18Z","lastTransitionTime":"2026-01-22T14:20:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.946150 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.946231 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.946249 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.946283 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.946304 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:18Z","lastTransitionTime":"2026-01-22T14:20:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.955444 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 12:51:36.669378226 +0000 UTC Jan 22 14:20:18 crc kubenswrapper[4948]: I0122 14:20:18.956756 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:18 crc kubenswrapper[4948]: E0122 14:20:18.956944 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.049956 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.050035 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.050051 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.050074 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.050089 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:19Z","lastTransitionTime":"2026-01-22T14:20:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.152254 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.152305 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.152319 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.152338 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.152350 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:19Z","lastTransitionTime":"2026-01-22T14:20:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.254728 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.254772 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.254784 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.254802 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.254814 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:19Z","lastTransitionTime":"2026-01-22T14:20:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.358601 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.358666 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.358688 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.358718 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.358740 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:19Z","lastTransitionTime":"2026-01-22T14:20:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.461935 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.461988 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.462000 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.462038 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.462053 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:19Z","lastTransitionTime":"2026-01-22T14:20:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.565055 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.565111 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.565125 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.565146 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.565160 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:19Z","lastTransitionTime":"2026-01-22T14:20:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.667058 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.667103 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.667113 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.667151 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.667163 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:19Z","lastTransitionTime":"2026-01-22T14:20:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.768903 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.768965 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.769096 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.769118 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.769133 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:19Z","lastTransitionTime":"2026-01-22T14:20:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.871310 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.871353 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.871365 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.871383 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.871397 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:19Z","lastTransitionTime":"2026-01-22T14:20:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.956156 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 21:41:25.11899757 +0000 UTC Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.956377 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.956399 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.956399 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:19 crc kubenswrapper[4948]: E0122 14:20:19.956532 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:20:19 crc kubenswrapper[4948]: E0122 14:20:19.956901 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:20:19 crc kubenswrapper[4948]: E0122 14:20:19.956995 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.973975 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.974072 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.974087 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.974108 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:19 crc kubenswrapper[4948]: I0122 14:20:19.974124 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:19Z","lastTransitionTime":"2026-01-22T14:20:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.076308 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.076349 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.076358 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.076373 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.076383 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:20Z","lastTransitionTime":"2026-01-22T14:20:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.179731 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.179779 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.179792 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.179810 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.179822 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:20Z","lastTransitionTime":"2026-01-22T14:20:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.283410 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.283484 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.283503 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.283533 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.283553 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:20Z","lastTransitionTime":"2026-01-22T14:20:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.387062 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.387138 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.387158 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.387184 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.387242 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:20Z","lastTransitionTime":"2026-01-22T14:20:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.490585 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.490634 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.490645 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.490663 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.490674 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:20Z","lastTransitionTime":"2026-01-22T14:20:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.593299 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.593351 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.593359 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.593373 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.593382 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:20Z","lastTransitionTime":"2026-01-22T14:20:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.697284 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.697338 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.697353 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.697375 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.697389 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:20Z","lastTransitionTime":"2026-01-22T14:20:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.800856 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.800925 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.800949 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.800980 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.801001 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:20Z","lastTransitionTime":"2026-01-22T14:20:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.903892 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.904009 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.904081 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.904112 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.904139 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:20Z","lastTransitionTime":"2026-01-22T14:20:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.956538 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 20:08:03.827054926 +0000 UTC Jan 22 14:20:20 crc kubenswrapper[4948]: I0122 14:20:20.956784 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:20 crc kubenswrapper[4948]: E0122 14:20:20.956959 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.006658 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.006718 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.006733 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.006755 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.006768 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:21Z","lastTransitionTime":"2026-01-22T14:20:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.109849 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.109897 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.109908 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.109923 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.109935 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:21Z","lastTransitionTime":"2026-01-22T14:20:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.212533 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.212587 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.212604 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.212626 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.212643 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:21Z","lastTransitionTime":"2026-01-22T14:20:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.314430 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.314462 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.314472 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.314484 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.314493 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:21Z","lastTransitionTime":"2026-01-22T14:20:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.416884 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.416931 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.416949 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.416971 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.416984 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:21Z","lastTransitionTime":"2026-01-22T14:20:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.519312 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.519347 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.519358 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.519373 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.519381 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:21Z","lastTransitionTime":"2026-01-22T14:20:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.621196 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.621601 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.621613 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.621630 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.621642 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:21Z","lastTransitionTime":"2026-01-22T14:20:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.723774 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.723813 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.723824 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.723838 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.723849 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:21Z","lastTransitionTime":"2026-01-22T14:20:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.825788 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.825834 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.825846 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.825867 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.825881 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:21Z","lastTransitionTime":"2026-01-22T14:20:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.928922 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.928992 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.929032 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.929054 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.929066 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:21Z","lastTransitionTime":"2026-01-22T14:20:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.956175 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.956232 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.956271 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:21 crc kubenswrapper[4948]: E0122 14:20:21.956445 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:20:21 crc kubenswrapper[4948]: E0122 14:20:21.956574 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:20:21 crc kubenswrapper[4948]: E0122 14:20:21.956649 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.956825 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 06:27:18.698561837 +0000 UTC Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.974060 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j4g7x" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d0ef450-a908-4e0f-b551-de0724320540\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77e0f7c92ba0b75f07179e4619dbbf9270727b13bd14348dec038239fef65af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mzcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\
\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j4g7x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:21Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.976674 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Jan 22 14:20:21 crc kubenswrapper[4948]: I0122 14:20:21.996542 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-op
erator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:21Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.014748 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"82010fba-1711-4f4d-923f-f95d74fe5925\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39b1ca48b550f3c5c60b38f85e9238163219c4e67d9d64c61b667bcc6bee07a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8ab7b54d1a954fa832fdbf15ff09e53f86b34862b2f94f402a1a535a4a45dfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68e1e2293dd2e7926da9b30214fa8ec236684f533ecfa28da18e7e4853f7cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c172fc8d47e9ef3c270382104a7d16f1befec0f84d60dc31d862c5de8a417f9d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c172fc8d47e9ef3c270382104a7d16f1befec0f84d60dc31d862c5de8a417f9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:22Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.029990 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:22Z is after 2025-08-24T17:21:41Z" Jan 22 
14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.032914 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.032951 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.032965 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.032984 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.032995 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:22Z","lastTransitionTime":"2026-01-22T14:20:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.045365 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:22Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.061343 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:22Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.074417 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:22Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.090388 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:22Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.105509 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad48d2cda8a3700afaf5c50dbd1b380cc15c78e9bb88b42701297b7aefd5d5b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:22Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.122700 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-conf
ig\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:22Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.136062 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:22Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.136404 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.136463 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.136479 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.136505 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.136524 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:22Z","lastTransitionTime":"2026-01-22T14:20:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.146563 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:22Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.157747 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://383602c28d40976a941bc8e2f079879a9e8b7d668b994058bd005455eaf3260b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:20:08Z\\\",\\\"message\\\":\\\"2026-01-22T14:19:23+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_0bae88fb-90be-4197-870d-ce3b5fd6f9f8\\\\n2026-01-22T14:19:23+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_0bae88fb-90be-4197-870d-ce3b5fd6f9f8 to /host/opt/cni/bin/\\\\n2026-01-22T14:19:23Z [verbose] multus-daemon started\\\\n2026-01-22T14:19:23Z [verbose] Readiness Indicator file check\\\\n2026-01-22T14:20:08Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:20:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:22Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.169824 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:22Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.189536 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1ddd883d66df032d348dd5c01bf01eec618e6ceab82f0b74d6411033d96419c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://724993b6da16942a70c289ef23b888e5c99acdb5e757981e26ff9cfb7ca074ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b08d2009af5c34e87963d42b418cb5bc2e4420bcae3ff157f76072a644659921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://174e918b3a7095dee796235feeb4d0ffffbfbba45bd3f647cb68a4980e921778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81d7b5a23cfc553e1a1c8a693b618499068a9bd28d8fde52df86091c290081e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a22632cd0bfb0bc2caa9fabdd5f4cbb6a7f4bb9c366fe06c7367d64584cb98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39986e0fef791ac8fb51e96c9f7bac60fcf8c367
2361aad41521f3aece660aa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39986e0fef791ac8fb51e96c9f7bac60fcf8c3672361aad41521f3aece660aa1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:20:17Z\\\",\\\"message\\\":\\\":default/a8519615025667110816) with []\\\\nI0122 14:20:17.045126 7013 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0122 14:20:17.045218 7013 factory.go:1336] Added *v1.Node event handler 7\\\\nI0122 14:20:17.045270 7013 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0122 14:20:17.045294 7013 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 14:20:17.045332 7013 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 14:20:17.045349 7013 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 14:20:17.045365 7013 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 14:20:17.045380 7013 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 14:20:17.045406 7013 factory.go:656] Stopping watch factory\\\\nI0122 14:20:17.045437 7013 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0122 14:20:17.045763 7013 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0122 14:20:17.045861 7013 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0122 14:20:17.045904 7013 ovnkube.go:599] Stopped ovnkube\\\\nI0122 14:20:17.045926 7013 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0122 14:20:17.046001 7013 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:20:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-c5294_openshift-ovn-kubernetes(937749c1-3c7a-4f48-b65a-84b1335d64b6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67faca70b5ec9d02fe3cd9455cb1b95e9dce6fe764d354a3ce7b132ccfc315b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:22Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.200489 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ece4aa-3874-4f6e-a88f-efcc2e745c10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://394b37292a9478d6e14f7cbb503ad5c350bec50720d2e6cfcb9fd2bf5640bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5175067f2a33555ac08c94572fbffc6c7b5dd06b33356c338a77492e859e5b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wxjm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:22Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.210568 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fxccj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1280bb23-c283-4970-a78b-6de7854469ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:36Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fxccj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:22Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.238651 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.238689 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.238699 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.238714 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.238724 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:22Z","lastTransitionTime":"2026-01-22T14:20:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.340830 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.340878 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.340889 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.340905 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.340916 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:22Z","lastTransitionTime":"2026-01-22T14:20:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.443500 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.443534 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.443545 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.443559 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.443571 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:22Z","lastTransitionTime":"2026-01-22T14:20:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.545346 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.545411 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.545424 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.545444 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.545457 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:22Z","lastTransitionTime":"2026-01-22T14:20:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.647400 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.647440 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.647449 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.647461 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.647470 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:22Z","lastTransitionTime":"2026-01-22T14:20:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.750930 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.750973 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.750985 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.751003 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.751037 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:22Z","lastTransitionTime":"2026-01-22T14:20:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.853052 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.853093 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.853103 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.853115 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.853125 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:22Z","lastTransitionTime":"2026-01-22T14:20:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.956103 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.956144 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.956162 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.956181 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.956194 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:22Z","lastTransitionTime":"2026-01-22T14:20:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.956582 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:22 crc kubenswrapper[4948]: E0122 14:20:22.956706 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:20:22 crc kubenswrapper[4948]: I0122 14:20:22.957660 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 02:19:22.193021311 +0000 UTC Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.059140 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.059184 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.059195 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.059211 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.059226 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:23Z","lastTransitionTime":"2026-01-22T14:20:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.161748 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.161792 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.161803 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.161817 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.161825 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:23Z","lastTransitionTime":"2026-01-22T14:20:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.264506 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.264556 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.264567 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.264585 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.264597 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:23Z","lastTransitionTime":"2026-01-22T14:20:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.371960 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.372215 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.372477 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.372853 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.372876 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:23Z","lastTransitionTime":"2026-01-22T14:20:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.474999 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.475053 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.475065 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.475081 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.475093 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:23Z","lastTransitionTime":"2026-01-22T14:20:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.577543 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.577604 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.577620 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.577643 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.577660 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:23Z","lastTransitionTime":"2026-01-22T14:20:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.680609 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.680662 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.680671 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.680687 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.680699 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:23Z","lastTransitionTime":"2026-01-22T14:20:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.784068 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.784202 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.784224 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.784251 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.784276 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:23Z","lastTransitionTime":"2026-01-22T14:20:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.887002 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.887080 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.887099 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.887120 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.887129 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:23Z","lastTransitionTime":"2026-01-22T14:20:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.957272 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.957332 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.957286 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:20:23 crc kubenswrapper[4948]: E0122 14:20:23.957522 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:20:23 crc kubenswrapper[4948]: E0122 14:20:23.957610 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:20:23 crc kubenswrapper[4948]: E0122 14:20:23.957697 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.957769 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 03:49:42.711457939 +0000 UTC Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.989454 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.989483 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.989497 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.989514 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:23 crc kubenswrapper[4948]: I0122 14:20:23.989528 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:23Z","lastTransitionTime":"2026-01-22T14:20:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.092500 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.092579 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.092603 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.092632 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.092671 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:24Z","lastTransitionTime":"2026-01-22T14:20:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.195669 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.195719 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.195733 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.195751 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.195764 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:24Z","lastTransitionTime":"2026-01-22T14:20:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.299274 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.299317 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.299326 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.299341 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.299350 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:24Z","lastTransitionTime":"2026-01-22T14:20:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.402042 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.402095 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.402105 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.402122 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.402134 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:24Z","lastTransitionTime":"2026-01-22T14:20:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.504048 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.504119 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.504136 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.504162 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.504178 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:24Z","lastTransitionTime":"2026-01-22T14:20:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.607254 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.607338 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.607357 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.607387 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.607409 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:24Z","lastTransitionTime":"2026-01-22T14:20:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.710770 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.710831 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.710877 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.710904 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.710921 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:24Z","lastTransitionTime":"2026-01-22T14:20:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.813494 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.813530 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.813540 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.813554 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.813562 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:24Z","lastTransitionTime":"2026-01-22T14:20:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.916300 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.916338 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.916355 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.916380 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.916393 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:24Z","lastTransitionTime":"2026-01-22T14:20:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.956303 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:24 crc kubenswrapper[4948]: E0122 14:20:24.956453 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:20:24 crc kubenswrapper[4948]: I0122 14:20:24.958314 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 10:58:11.633631072 +0000 UTC Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.018930 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.018988 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.019001 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.019040 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.019054 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:25Z","lastTransitionTime":"2026-01-22T14:20:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.121678 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.121719 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.121732 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.121751 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.121764 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:25Z","lastTransitionTime":"2026-01-22T14:20:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.223799 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.223863 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.223882 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.223899 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.223912 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:25Z","lastTransitionTime":"2026-01-22T14:20:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.326970 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.327029 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.327044 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.327112 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.327128 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:25Z","lastTransitionTime":"2026-01-22T14:20:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.429760 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.429794 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.429805 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.429819 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.429828 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:25Z","lastTransitionTime":"2026-01-22T14:20:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.532346 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.532380 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.532389 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.532402 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.532412 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:25Z","lastTransitionTime":"2026-01-22T14:20:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.635116 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.635188 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.635206 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.635229 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.635245 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:25Z","lastTransitionTime":"2026-01-22T14:20:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.738656 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.738822 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.738855 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.738882 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.738903 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:25Z","lastTransitionTime":"2026-01-22T14:20:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.840654 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.840714 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.840723 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.840738 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.840750 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:25Z","lastTransitionTime":"2026-01-22T14:20:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.864551 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:20:25 crc kubenswrapper[4948]: E0122 14:20:25.864780 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:29.864748242 +0000 UTC m=+148.194421181 (durationBeforeRetry 1m4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.944108 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.944180 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.944201 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.944224 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.944242 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:25Z","lastTransitionTime":"2026-01-22T14:20:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.956376 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:20:25 crc kubenswrapper[4948]: E0122 14:20:25.956532 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.956664 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.956664 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:25 crc kubenswrapper[4948]: E0122 14:20:25.957169 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:20:25 crc kubenswrapper[4948]: E0122 14:20:25.957325 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.958517 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 16:23:31.901072667 +0000 UTC Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.966851 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:25 crc kubenswrapper[4948]: E0122 14:20:25.966896 4948 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.966964 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:20:25 crc kubenswrapper[4948]: E0122 14:20:25.966978 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-01-22 14:21:29.966953056 +0000 UTC m=+148.296625985 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Jan 22 14:20:25 crc kubenswrapper[4948]: E0122 14:20:25.967228 4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:20:25 crc kubenswrapper[4948]: E0122 14:20:25.967255 4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 14:20:25 crc kubenswrapper[4948]: E0122 14:20:25.967282 4948 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:20:25 crc kubenswrapper[4948]: E0122 14:20:25.967385 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-01-22 14:21:29.967359317 +0000 UTC m=+148.297032286 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.967711 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:20:25 crc kubenswrapper[4948]: E0122 14:20:25.967862 4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Jan 22 14:20:25 crc kubenswrapper[4948]: E0122 14:20:25.967994 4948 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Jan 22 14:20:25 crc kubenswrapper[4948]: E0122 14:20:25.968033 4948 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:20:25 crc kubenswrapper[4948]: E0122 14:20:25.968086 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-01-22 14:21:29.968069266 +0000 UTC m=+148.297742235 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Jan 22 14:20:25 crc kubenswrapper[4948]: E0122 14:20:25.968208 4948 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:20:25 crc kubenswrapper[4948]: I0122 14:20:25.967934 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:25 crc kubenswrapper[4948]: E0122 14:20:25.968313 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. 
No retries permitted until 2026-01-22 14:21:29.968285582 +0000 UTC m=+148.297958551 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.047344 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.047423 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.047445 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.047469 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.047606 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:26Z","lastTransitionTime":"2026-01-22T14:20:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.150924 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.150993 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.151056 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.151104 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.151126 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:26Z","lastTransitionTime":"2026-01-22T14:20:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.254619 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.254668 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.254681 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.254702 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.254720 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:26Z","lastTransitionTime":"2026-01-22T14:20:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.288754 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.288840 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.288861 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.288892 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.288916 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:26Z","lastTransitionTime":"2026-01-22T14:20:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:26 crc kubenswrapper[4948]: E0122 14:20:26.310598 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.317506 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.317582 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.317610 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.317643 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.317665 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:26Z","lastTransitionTime":"2026-01-22T14:20:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:26 crc kubenswrapper[4948]: E0122 14:20:26.339654 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.347317 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.347383 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.347417 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.347445 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.347463 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:26Z","lastTransitionTime":"2026-01-22T14:20:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:26 crc kubenswrapper[4948]: E0122 14:20:26.370531 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.377804 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.377887 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.377912 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.378197 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.378226 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:26Z","lastTransitionTime":"2026-01-22T14:20:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:26 crc kubenswrapper[4948]: E0122 14:20:26.398772 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.404275 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.404315 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.404327 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.404346 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.404360 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:26Z","lastTransitionTime":"2026-01-22T14:20:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:26 crc kubenswrapper[4948]: E0122 14:20:26.423240 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:26Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f658f1ea-a0cd-4148-8df5-1908e573417f\\\",\\\"systemUUID\\\":\\\"2f2854c2-dd08-4cb6-93e0-f4a4235576f6\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:26Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:26 crc kubenswrapper[4948]: E0122 14:20:26.423356 4948 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.425660 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.425901 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.425916 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.425941 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.425955 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:26Z","lastTransitionTime":"2026-01-22T14:20:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.528347 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.528422 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.528441 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.528469 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.528488 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:26Z","lastTransitionTime":"2026-01-22T14:20:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.632356 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.632435 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.632461 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.632499 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.632525 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:26Z","lastTransitionTime":"2026-01-22T14:20:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.735898 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.735972 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.735985 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.736003 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.736043 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:26Z","lastTransitionTime":"2026-01-22T14:20:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.839005 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.839100 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.839121 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.839150 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.839170 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:26Z","lastTransitionTime":"2026-01-22T14:20:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.942839 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.942887 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.942901 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.942921 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.942939 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:26Z","lastTransitionTime":"2026-01-22T14:20:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.956354 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:26 crc kubenswrapper[4948]: E0122 14:20:26.956826 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:20:26 crc kubenswrapper[4948]: I0122 14:20:26.959471 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 11:35:21.673688641 +0000 UTC Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.046201 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.046282 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.046308 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.046337 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.046356 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:27Z","lastTransitionTime":"2026-01-22T14:20:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.149949 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.150062 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.150089 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.150116 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.150137 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:27Z","lastTransitionTime":"2026-01-22T14:20:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.253609 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.253675 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.253693 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.253719 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.253738 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:27Z","lastTransitionTime":"2026-01-22T14:20:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.357184 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.357247 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.357271 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.357299 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.357323 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:27Z","lastTransitionTime":"2026-01-22T14:20:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.460344 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.460390 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.460399 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.460413 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.460422 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:27Z","lastTransitionTime":"2026-01-22T14:20:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.562865 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.562941 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.563072 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.563104 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.563122 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:27Z","lastTransitionTime":"2026-01-22T14:20:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.666551 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.666601 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.666618 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.666637 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.666650 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:27Z","lastTransitionTime":"2026-01-22T14:20:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.769880 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.769953 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.769972 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.769996 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.770047 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:27Z","lastTransitionTime":"2026-01-22T14:20:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.872373 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.872446 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.872482 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.872515 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.872542 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:27Z","lastTransitionTime":"2026-01-22T14:20:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.956919 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.956919 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.956984 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:27 crc kubenswrapper[4948]: E0122 14:20:27.957230 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:20:27 crc kubenswrapper[4948]: E0122 14:20:27.957427 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:20:27 crc kubenswrapper[4948]: E0122 14:20:27.957546 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.963201 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 00:14:04.172697151 +0000 UTC Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.975313 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.977147 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.977203 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.977220 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.977241 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:27 crc kubenswrapper[4948]: I0122 14:20:27.977258 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:27Z","lastTransitionTime":"2026-01-22T14:20:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.080147 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.080202 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.080221 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.080244 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.080264 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:28Z","lastTransitionTime":"2026-01-22T14:20:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.183662 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.183708 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.183720 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.183741 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.183754 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:28Z","lastTransitionTime":"2026-01-22T14:20:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.286357 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.286409 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.286423 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.286442 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.286457 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:28Z","lastTransitionTime":"2026-01-22T14:20:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.389618 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.389653 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.389663 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.389686 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.389709 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:28Z","lastTransitionTime":"2026-01-22T14:20:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.492210 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.492289 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.492316 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.492346 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.492369 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:28Z","lastTransitionTime":"2026-01-22T14:20:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.594690 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.594731 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.594740 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.594753 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.594762 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:28Z","lastTransitionTime":"2026-01-22T14:20:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.698384 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.698442 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.698454 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.698474 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.698487 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:28Z","lastTransitionTime":"2026-01-22T14:20:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.801620 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.801695 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.801718 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.801748 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.801782 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:28Z","lastTransitionTime":"2026-01-22T14:20:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.904997 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.905088 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.905106 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.905131 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.905150 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:28Z","lastTransitionTime":"2026-01-22T14:20:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.956851 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:28 crc kubenswrapper[4948]: E0122 14:20:28.957171 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:20:28 crc kubenswrapper[4948]: I0122 14:20:28.963832 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-25 20:29:24.194293696 +0000 UTC Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.008942 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.008984 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.009001 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.009078 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.009096 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:29Z","lastTransitionTime":"2026-01-22T14:20:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.112822 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.112910 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.112944 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.112975 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.112997 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:29Z","lastTransitionTime":"2026-01-22T14:20:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.216591 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.216653 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.216676 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.216708 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.216730 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:29Z","lastTransitionTime":"2026-01-22T14:20:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.319287 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.319336 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.319355 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.319377 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.319394 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:29Z","lastTransitionTime":"2026-01-22T14:20:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.423005 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.423127 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.423151 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.423182 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.423205 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:29Z","lastTransitionTime":"2026-01-22T14:20:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.526340 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.526702 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.526898 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.527240 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.527425 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:29Z","lastTransitionTime":"2026-01-22T14:20:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.631167 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.631267 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.631289 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.631315 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.631333 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:29Z","lastTransitionTime":"2026-01-22T14:20:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.734053 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.734404 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.734571 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.734908 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.735288 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:29Z","lastTransitionTime":"2026-01-22T14:20:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.838376 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.838705 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.838849 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.838987 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.839182 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:29Z","lastTransitionTime":"2026-01-22T14:20:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.941529 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.941583 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.941594 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.941609 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.941617 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:29Z","lastTransitionTime":"2026-01-22T14:20:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.957054 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:20:29 crc kubenswrapper[4948]: E0122 14:20:29.957172 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.957280 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.957305 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:29 crc kubenswrapper[4948]: E0122 14:20:29.957345 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:20:29 crc kubenswrapper[4948]: E0122 14:20:29.957812 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.958117 4948 scope.go:117] "RemoveContainer" containerID="39986e0fef791ac8fb51e96c9f7bac60fcf8c3672361aad41521f3aece660aa1" Jan 22 14:20:29 crc kubenswrapper[4948]: E0122 14:20:29.958327 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-c5294_openshift-ovn-kubernetes(937749c1-3c7a-4f48-b65a-84b1335d64b6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" podUID="937749c1-3c7a-4f48-b65a-84b1335d64b6" Jan 22 14:20:29 crc kubenswrapper[4948]: I0122 14:20:29.964068 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 15:49:53.785591881 +0000 UTC Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.043864 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.043903 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.043915 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.043932 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.043947 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:30Z","lastTransitionTime":"2026-01-22T14:20:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.166999 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.167079 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.167098 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.167127 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.167145 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:30Z","lastTransitionTime":"2026-01-22T14:20:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.269740 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.269803 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.269823 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.269846 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.269862 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:30Z","lastTransitionTime":"2026-01-22T14:20:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.373309 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.373392 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.373414 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.373441 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.373461 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:30Z","lastTransitionTime":"2026-01-22T14:20:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.477556 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.477608 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.477623 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.477657 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.477678 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:30Z","lastTransitionTime":"2026-01-22T14:20:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.580451 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.580535 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.580549 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.580568 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.580580 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:30Z","lastTransitionTime":"2026-01-22T14:20:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.683990 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.684072 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.684088 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.684108 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.684122 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:30Z","lastTransitionTime":"2026-01-22T14:20:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.786302 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.786362 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.786375 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.786396 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.786408 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:30Z","lastTransitionTime":"2026-01-22T14:20:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.889206 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.889536 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.889637 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.889734 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.889813 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:30Z","lastTransitionTime":"2026-01-22T14:20:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.956840 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:30 crc kubenswrapper[4948]: E0122 14:20:30.957065 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.965081 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-09 15:20:34.335424694 +0000 UTC Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.992690 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.992943 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.993185 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.993414 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:30 crc kubenswrapper[4948]: I0122 14:20:30.993604 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:30Z","lastTransitionTime":"2026-01-22T14:20:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.096406 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.096797 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.097004 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.097355 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.097589 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:31Z","lastTransitionTime":"2026-01-22T14:20:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.201051 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.201108 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.201123 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.201141 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.201153 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:31Z","lastTransitionTime":"2026-01-22T14:20:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.303951 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.304046 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.304066 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.304091 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.304109 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:31Z","lastTransitionTime":"2026-01-22T14:20:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.407475 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.407527 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.407541 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.407562 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.407577 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:31Z","lastTransitionTime":"2026-01-22T14:20:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.511182 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.511231 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.511248 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.511266 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.511278 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:31Z","lastTransitionTime":"2026-01-22T14:20:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.614080 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.614122 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.614130 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.614148 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.614160 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:31Z","lastTransitionTime":"2026-01-22T14:20:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.717115 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.717191 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.717209 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.717234 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.717252 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:31Z","lastTransitionTime":"2026-01-22T14:20:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.819531 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.819578 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.819590 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.819607 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.819621 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:31Z","lastTransitionTime":"2026-01-22T14:20:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.922378 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.922447 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.922464 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.922486 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.922501 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:31Z","lastTransitionTime":"2026-01-22T14:20:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.957270 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.957318 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:20:31 crc kubenswrapper[4948]: E0122 14:20:31.957433 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.957497 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:20:31 crc kubenswrapper[4948]: E0122 14:20:31.957641 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:20:31 crc kubenswrapper[4948]: E0122 14:20:31.957762 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.966026 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-14 22:24:11.486404077 +0000 UTC Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.973552 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cba1d4a14e43c1a6ddec07dea240b4d03f8ff598cb1f73c956e37816a797a5a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2026-01-22T14:20:31Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:31 crc kubenswrapper[4948]: I0122 14:20:31.996752 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"937749c1-3c7a-4f48-b65a-84b1335d64b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c1ddd883d66df032d348dd5c01bf01eec618e6ceab82f0b74d6411033d96419c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://724993b6da16942a70c289ef23b888e5c99acdb5e757981e26ff9cfb7ca074ad\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disable
d\\\"}]},{\\\"containerID\\\":\\\"cri-o://b08d2009af5c34e87963d42b418cb5bc2e4420bcae3ff157f76072a644659921\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://174e918b3a7095dee796235feeb4d0ffffbfbba45bd3f647cb68a4980e921778\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://81d7b5a23cfc553e1a1c8a693b618499068a9bd28d8fde52df86091c290081e6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://34a22632cd0bfb0bc2caa9fabdd5f4cbb6a7f4bb9c366fe06c7367d64584cb98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\
",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://39986e0fef791ac8fb51e96c9f7bac60fcf8c3672361aad41521f3aece660aa1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://39986e0fef791ac8fb51e96c9f7bac60fcf8c3672361aad41521f3aece660aa1\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:20:17Z\\\",\\\"message\\\":\\\":default/a8519615025667110816) with []\\\\nI0122 14:20:17.045126 7013 address_set.go:302] New(bf133528-8652-4c84-85ff-881f0afe9837/default-network-controller:EgressService:egresssvc-served-pods:v4/a13607449821398607916) with []\\\\nI0122 14:20:17.045218 7013 factory.go:1336] Added *v1.Node event handler 7\\\\nI0122 14:20:17.045270 7013 factory.go:1336] Added *v1.EgressIP event handler 8\\\\nI0122 14:20:17.045294 7013 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0122 14:20:17.045332 7013 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0122 14:20:17.045349 7013 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0122 14:20:17.045365 7013 handler.go:208] Removed *v1.Node event handler 2\\\\nI0122 14:20:17.045380 7013 handler.go:208] Removed *v1.Node event handler 7\\\\nI0122 14:20:17.045406 7013 factory.go:656] Stopping watch factory\\\\nI0122 14:20:17.045437 7013 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0122 14:20:17.045763 7013 factory.go:1336] Added *v1.EgressFirewall event handler 9\\\\nI0122 14:20:17.045861 7013 controller.go:132] Adding controller ef_node_controller event handlers\\\\nI0122 14:20:17.045904 7013 ovnkube.go:599] Stopped ovnkube\\\\nI0122 14:20:17.045926 7013 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0122 14:20:17.046001 7013 ovnkube.go:\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:20:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-c5294_openshift-ovn-kubernetes(937749c1-3c7a-4f48-b65a-84b1335d64b6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://67faca70b5ec9d02fe3cd9455cb1b95e9dce6fe764d354a3ce7b132ccfc315b1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://3b00487539e34d9a6c6befa690d76567e2bfa5fce82b44da47d55351a4132573\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rgjfx\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-c5294\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:31Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.011169 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ece4aa-3874-4f6e-a88f-efcc2e745c10\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://394b37292a9478d6e14f7cbb503ad5c350bec50720d2e6cfcb9fd2bf5640bd30\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://5175067f2a33555ac08c94572fbffc6c7b5dd06b33356c338a77492e859e5b79\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pqkg5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:35Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-wxjm7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.024774 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fxccj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"1280bb23-c283-4970-a78b-6de7854469ac\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:36Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-29g7q\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:36Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fxccj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.025079 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.025124 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.025158 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.025174 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.025186 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:32Z","lastTransitionTime":"2026-01-22T14:20:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.047710 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"eaf0ffdf-ad9d-4810-b4b5-98b9bd0e280c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://45b96b5c2089778a60c2c5871d958ec65dd1f8cdd59ea993618c30c84c9b2340\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3033fbe7968daf0e893954cce63ef44d3f852ff5c4601033b9b5d5ab5ed4ffa0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9795a2397106e9c3e572667dbd6c43e6bf99a73b34822e48185b74d579107534\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d5c197970cae556369225edda153ddd77c52ac2e16f9f8261554730938751a09\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://64108e021cfaf370b7d475a4e623b9c99fd59b7da896e8da99164fd9ad868219\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a31fe64dd0ebfc63579bf71a0874db31587989d3b1688d626dfa710edb0e86c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a31fe64dd0ebfc63579bf71a0874db31587989d3b1688d626dfa710edb0e86c1\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cf65930d6b5311cd14e4235b7dd68eacc098aa2256c5f4dd579c20660dd30b0a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cf65930d6b5311cd14e4235b7dd68eacc098aa2256c5f4dd579c20660dd30b0a\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2026-01-22T14:19:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://dcd0544fa7edc3b97e9f3ee21e71fefbc1cf920e66fb2cd238ca70ada0b8dcd0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dcd0544fa7edc3b97e9f3ee21e71fefbc1cf920e66fb2cd238ca70ada0b8dcd0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.062479 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e8f450d3-f366-4f66-a51d-63716a135bbb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e801f5e1b61c697128546a578cca9a54917962b47aaec32e0a4eecf6e36ceafc\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://8275683284a880c6011ad583e14e71c3e864a18b132a9608919b7e32c687a860\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://98401f5215dd7230d91292d9beb79d6d17227fc3181cecb297cdf962f06eb788\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.075965 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"82010fba-1711-4f4d-923f-f95d74fe5925\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:55Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://39b1ca48b550f3c5c60b38f85e9238163219c4e67d9d64c61b667bcc6bee07a0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b8ab7b54d1a954fa832fdbf15ff09e53f86b34862b2f94f402a1a535a4a45dfa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1b68e1e2293dd2e7926da9b30214fa8ec236684f533ecfa28da18e7e4853f7cb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c172fc8d47e9ef3c270382104a7d16f1befec0f84d60dc31d862c5de8a417f9d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c172fc8d47e9ef3c270382104a7d16f1befec0f84d60dc31d862c5de8a417f9d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.089882 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b8bcfb5598a3add28469448541fa135eacc70b997b29ce0e666bb3d849def227\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.102088 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-jwqmh" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"8a86f840-13e5-4b83-a9ab-018c2fa70906\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cf76905955fac50193e43e1dea1f1c375bab125807388cc35f97fb5693a4f059\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-n2dzw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-jwqmh\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.112471 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-j4g7x" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"2d0ef450-a908-4e0f-b551-de0724320540\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://e77e0f7c92ba0b75f07179e4619dbbf9270727b13bd14348dec038239fef65af\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6mzcw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:25Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-j4g7x\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.125781 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e3e303b6-1b1c-4759-b5b2-ab90c680e0b2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4a88e20768986644fa89cd588226f925d57c182ac71ae593245ceb60b95acd69\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44308a29d44b98a8e2c25e71b22a7984d83d8724dc8aa18964fda9d325ea4d26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://44308a29d44b98a8e2c25e71b22a7984d83d8724dc8aa18964fda9d325ea4d26\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.127694 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.127726 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.127736 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.127751 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.127760 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:32Z","lastTransitionTime":"2026-01-22T14:20:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.140197 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"70cc2753-c3d6-409c-ad8a-9dabbb34462d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:02Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-po
d-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1769091555\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1769091555\\\\\\\\\\\\\\\" (2026-01-22 13:19:15 +0000 UTC to 2027-01-22 13:19:15 +0000 UTC (now=2026-01-22 14:19:20.968359518 +0000 UTC))\\\\\\\"\\\\nI0122 14:19:20.968410 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI0122 14:19:20.968435 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI0122 14:19:20.968462 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968488 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI0122 14:19:20.968529 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968534 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI0122 14:19:20.968593 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI0122 14:19:20.968598 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nI0122 14:19:20.968676 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI0122 14:19:20.968697 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3792780753/tls.crt::/tmp/serving-cert-3792780753/tls.key\\\\\\\"\\\\nI0122 14:19:20.968908 1 tlsconfig.go:243] \\\\\\\"Starting 
DynamicServingCertificateController\\\\\\\"\\\\nI0122 14:19:20.968718 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF0122 14:19:20.968971 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:05Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:04Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:03Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:03Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:02Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.151932 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.165580 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.182461 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"45147dae-218a-40b8-8e9e-927e49d38624\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:28Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ad48d2cda8a3700afaf5c50dbd1b380cc15c78e9bb88b42701297b7aefd5d5b3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:28Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://decd7496b109ff25b787f3c10cf951c5ae56b5e351e79163dc2285be69ffaac0\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c99b4dad9dc0a2af9007f28790b9fef7d71573560add2cf1e281dc700c3c874e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:23Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1cac3ebe4d673cfa180cd850d0e9429a48f6cb3db5c0de3898a450d53e20556e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:24Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://0a32aea63ffd840a2b54e40bfe7b12369da37d9c09dce26579fda58a2d9df259\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:25Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7e6e22745bf9fe094b2908a546d437adaf2829b82d51a976a72ae84040acc280\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:26Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:26Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a8fbb77e11a2bd26f57dc7b25a9c445ac1f7999bfb9f7e77dd822dea92db7443\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-01-22T14:19:27Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:27Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-bdhh6\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-z6s4b\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.195909 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9f0fa8b8041091949b54c84b2deb29e0fce513218fe7e220f0607528193008be\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://386d5dc79ec263c2faaf0adb3e711ed0848fc10b79b9b545ba871d2de3c453a5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.210909 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.222923 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"355ee4ab-8609-4b0c-ad77-bff335632d2d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:21Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa80e41d2559c538df170c8ea1fee085e18784885e33f7cdc78bfb466cdc577e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-6qn9n\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:21Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-ldzzt\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.230541 4948 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.230576 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.230587 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.230602 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.230613 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:32Z","lastTransitionTime":"2026-01-22T14:20:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.234524 4948 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-jwlht" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"feadf51a-0802-4335-b035-fe290bdd3960\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:23Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:19:22Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-01-22T14:20:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://383602c28d40976a941bc8e2f079879a9e8b7d668b994058bd005455eaf3260b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-01-22T14:20:08Z\\\",\\\"message\\\":\\\"2026-01-22T14:19:23+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_0bae88fb-90be-4197-870d-ce3b5fd6f9f8\\\\n2026-01-22T14:19:23+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_0bae88fb-90be-4197-870d-ce3b5fd6f9f8 to /host/opt/cni/bin/\\\\n2026-01-22T14:19:23Z [verbose] multus-daemon started\\\\n2026-01-22T14:19:23Z [verbose] Readiness Indicator file check\\\\n2026-01-22T14:20:08Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-01-22T14:19:22Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-01-22T14:20:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t49ls\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-01-22T14:19:22Z\\\"}}\" for pod \"openshift-multus\"/\"multus-jwlht\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-01-22T14:20:32Z is after 2025-08-24T17:21:41Z" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.333168 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.333211 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.333221 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.333236 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.333246 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:32Z","lastTransitionTime":"2026-01-22T14:20:32Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.439233 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.439270 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.439279 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.439292 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.439302 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:32Z","lastTransitionTime":"2026-01-22T14:20:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.540898 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.540941 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.540950 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.540966 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.540977 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:32Z","lastTransitionTime":"2026-01-22T14:20:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.645103 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.645185 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.645208 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.645239 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.645262 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:32Z","lastTransitionTime":"2026-01-22T14:20:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.749543 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.749604 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.749622 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.749645 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.749662 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:32Z","lastTransitionTime":"2026-01-22T14:20:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.853354 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.853399 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.853410 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.853430 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.853442 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:32Z","lastTransitionTime":"2026-01-22T14:20:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.956204 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:32 crc kubenswrapper[4948]: E0122 14:20:32.956572 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.957428 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.957460 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.957472 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.957486 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.957499 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:32Z","lastTransitionTime":"2026-01-22T14:20:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:32 crc kubenswrapper[4948]: I0122 14:20:32.967135 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-03 00:00:02.227187947 +0000 UTC Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.059696 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.059752 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.059769 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.059793 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.059812 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:33Z","lastTransitionTime":"2026-01-22T14:20:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.163889 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.163960 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.163983 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.164042 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.164061 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:33Z","lastTransitionTime":"2026-01-22T14:20:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.266918 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.266981 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.266994 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.267027 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.267040 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:33Z","lastTransitionTime":"2026-01-22T14:20:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.369368 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.369405 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.369416 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.369430 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.369442 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:33Z","lastTransitionTime":"2026-01-22T14:20:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.472591 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.472655 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.472675 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.472698 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.472715 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:33Z","lastTransitionTime":"2026-01-22T14:20:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.575127 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.575186 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.575204 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.575228 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.575247 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:33Z","lastTransitionTime":"2026-01-22T14:20:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.678160 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.678648 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.678824 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.678971 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.679227 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:33Z","lastTransitionTime":"2026-01-22T14:20:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.782316 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.782356 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.782367 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.782384 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.782400 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:33Z","lastTransitionTime":"2026-01-22T14:20:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.885412 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.885458 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.885474 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.885495 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.885511 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:33Z","lastTransitionTime":"2026-01-22T14:20:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.956624 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.956667 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:20:33 crc kubenswrapper[4948]: E0122 14:20:33.956819 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.956881 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:20:33 crc kubenswrapper[4948]: E0122 14:20:33.957095 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:20:33 crc kubenswrapper[4948]: E0122 14:20:33.957238 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.967436 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-27 02:49:03.642803969 +0000 UTC Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.988694 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.988764 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.988777 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.988797 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:33 crc kubenswrapper[4948]: I0122 14:20:33.988814 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:33Z","lastTransitionTime":"2026-01-22T14:20:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.091719 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.091761 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.091772 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.091787 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.091796 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:34Z","lastTransitionTime":"2026-01-22T14:20:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.194148 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.194219 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.194238 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.194264 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.194283 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:34Z","lastTransitionTime":"2026-01-22T14:20:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.296708 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.296756 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.296773 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.296791 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.296803 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:34Z","lastTransitionTime":"2026-01-22T14:20:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.399935 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.399973 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.399985 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.400003 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.400046 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:34Z","lastTransitionTime":"2026-01-22T14:20:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.503158 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.503201 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.503211 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.503231 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.503245 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:34Z","lastTransitionTime":"2026-01-22T14:20:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.605806 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.605956 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.605985 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.606049 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.606074 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:34Z","lastTransitionTime":"2026-01-22T14:20:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.712197 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.712264 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.712278 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.712296 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.712315 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:34Z","lastTransitionTime":"2026-01-22T14:20:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.814571 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.814634 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.814656 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.814685 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.814708 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:34Z","lastTransitionTime":"2026-01-22T14:20:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.917704 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.917784 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.917809 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.917843 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.917868 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:34Z","lastTransitionTime":"2026-01-22T14:20:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.957159 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:34 crc kubenswrapper[4948]: E0122 14:20:34.957429 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:20:34 crc kubenswrapper[4948]: I0122 14:20:34.968466 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 18:13:12.57200621 +0000 UTC Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.020115 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.020160 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.020172 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.020188 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.020199 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:35Z","lastTransitionTime":"2026-01-22T14:20:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.123060 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.123131 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.123152 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.123183 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.123206 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:35Z","lastTransitionTime":"2026-01-22T14:20:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.225934 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.226002 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.226047 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.226066 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.226077 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:35Z","lastTransitionTime":"2026-01-22T14:20:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.329436 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.329498 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.329516 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.329534 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.329582 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:35Z","lastTransitionTime":"2026-01-22T14:20:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.432697 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.432768 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.432786 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.432812 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.432833 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:35Z","lastTransitionTime":"2026-01-22T14:20:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.536073 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.536116 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.536129 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.536148 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.536162 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:35Z","lastTransitionTime":"2026-01-22T14:20:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.639088 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.639163 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.639184 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.639210 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.639228 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:35Z","lastTransitionTime":"2026-01-22T14:20:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.742412 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.742538 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.742559 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.742586 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.742604 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:35Z","lastTransitionTime":"2026-01-22T14:20:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.845452 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.845499 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.845511 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.845531 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.845543 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:35Z","lastTransitionTime":"2026-01-22T14:20:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.948744 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.948794 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.948808 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.948827 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.948840 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:35Z","lastTransitionTime":"2026-01-22T14:20:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.956590 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:35 crc kubenswrapper[4948]: E0122 14:20:35.956715 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.956766 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.956847 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:20:35 crc kubenswrapper[4948]: E0122 14:20:35.956998 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:20:35 crc kubenswrapper[4948]: E0122 14:20:35.957396 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:20:35 crc kubenswrapper[4948]: I0122 14:20:35.968645 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 16:39:34.940383064 +0000 UTC Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.051722 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.051758 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.051767 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.051782 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.051792 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:36Z","lastTransitionTime":"2026-01-22T14:20:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.154384 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.154428 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.154437 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.154454 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.154463 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:36Z","lastTransitionTime":"2026-01-22T14:20:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.257462 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.257517 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.257530 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.257549 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.257562 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:36Z","lastTransitionTime":"2026-01-22T14:20:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.359618 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.359678 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.359692 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.359713 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.359727 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:36Z","lastTransitionTime":"2026-01-22T14:20:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.462656 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.462717 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.462736 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.462771 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.462810 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:36Z","lastTransitionTime":"2026-01-22T14:20:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.565910 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.565957 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.565971 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.565990 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.566004 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:36Z","lastTransitionTime":"2026-01-22T14:20:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.668647 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.668688 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.668697 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.668712 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.668723 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:36Z","lastTransitionTime":"2026-01-22T14:20:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.740896 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.741188 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.741260 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.741340 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.741421 4948 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-01-22T14:20:36Z","lastTransitionTime":"2026-01-22T14:20:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.797292 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-q2nzb"] Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.798302 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-q2nzb" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.800640 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.801286 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.801531 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.801765 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.847457 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=15.847442747 podStartE2EDuration="15.847442747s" podCreationTimestamp="2026-01-22 14:20:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:20:36.844658872 +0000 UTC m=+95.174331801" watchObservedRunningTime="2026-01-22 14:20:36.847442747 +0000 UTC m=+95.177115676" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.862403 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=75.862347241 podStartE2EDuration="1m15.862347241s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:20:36.862138115 +0000 UTC m=+95.191811054" watchObservedRunningTime="2026-01-22 14:20:36.862347241 +0000 UTC m=+95.192020190" Jan 22 14:20:36 crc 
kubenswrapper[4948]: I0122 14:20:36.874007 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=41.873986895 podStartE2EDuration="41.873986895s" podCreationTimestamp="2026-01-22 14:19:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:20:36.873898493 +0000 UTC m=+95.203571432" watchObservedRunningTime="2026-01-22 14:20:36.873986895 +0000 UTC m=+95.203659824" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.891371 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/3e2033c4-8ecc-485d-90bb-962e6ed7c237-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-q2nzb\" (UID: \"3e2033c4-8ecc-485d-90bb-962e6ed7c237\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-q2nzb" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.891448 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/3e2033c4-8ecc-485d-90bb-962e6ed7c237-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-q2nzb\" (UID: \"3e2033c4-8ecc-485d-90bb-962e6ed7c237\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-q2nzb" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.891548 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3e2033c4-8ecc-485d-90bb-962e6ed7c237-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-q2nzb\" (UID: \"3e2033c4-8ecc-485d-90bb-962e6ed7c237\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-q2nzb" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.891580 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3e2033c4-8ecc-485d-90bb-962e6ed7c237-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-q2nzb\" (UID: \"3e2033c4-8ecc-485d-90bb-962e6ed7c237\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-q2nzb" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.891609 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3e2033c4-8ecc-485d-90bb-962e6ed7c237-service-ca\") pod \"cluster-version-operator-5c965bbfc6-q2nzb\" (UID: \"3e2033c4-8ecc-485d-90bb-962e6ed7c237\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-q2nzb" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.894690 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-jwqmh" podStartSLOduration=75.894680845 podStartE2EDuration="1m15.894680845s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:20:36.894308055 +0000 UTC m=+95.223981004" watchObservedRunningTime="2026-01-22 14:20:36.894680845 +0000 UTC m=+95.224353774" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.906231 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-image-registry/node-ca-j4g7x" podStartSLOduration=75.906214157 podStartE2EDuration="1m15.906214157s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:20:36.904832169 +0000 UTC m=+95.234505098" watchObservedRunningTime="2026-01-22 14:20:36.906214157 +0000 UTC m=+95.235887096" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.932335 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=75.932313063 podStartE2EDuration="1m15.932313063s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:20:36.93188813 +0000 UTC m=+95.261561089" watchObservedRunningTime="2026-01-22 14:20:36.932313063 +0000 UTC m=+95.261986002" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.933221 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=9.933206796 podStartE2EDuration="9.933206796s" podCreationTimestamp="2026-01-22 14:20:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:20:36.915497518 +0000 UTC m=+95.245170467" watchObservedRunningTime="2026-01-22 14:20:36.933206796 +0000 UTC m=+95.262879735" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.957198 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:36 crc kubenswrapper[4948]: E0122 14:20:36.957365 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.969415 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 18:17:38.600136036 +0000 UTC Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.969477 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.976988 4948 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.992614 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3e2033c4-8ecc-485d-90bb-962e6ed7c237-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-q2nzb\" (UID: \"3e2033c4-8ecc-485d-90bb-962e6ed7c237\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-q2nzb" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.992651 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3e2033c4-8ecc-485d-90bb-962e6ed7c237-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-q2nzb\" (UID: \"3e2033c4-8ecc-485d-90bb-962e6ed7c237\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-q2nzb" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.992672 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3e2033c4-8ecc-485d-90bb-962e6ed7c237-service-ca\") pod \"cluster-version-operator-5c965bbfc6-q2nzb\" (UID: \"3e2033c4-8ecc-485d-90bb-962e6ed7c237\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-q2nzb" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.992699 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/3e2033c4-8ecc-485d-90bb-962e6ed7c237-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-q2nzb\" (UID: \"3e2033c4-8ecc-485d-90bb-962e6ed7c237\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-q2nzb" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.992722 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/3e2033c4-8ecc-485d-90bb-962e6ed7c237-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-q2nzb\" (UID: \"3e2033c4-8ecc-485d-90bb-962e6ed7c237\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-q2nzb" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.992779 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/3e2033c4-8ecc-485d-90bb-962e6ed7c237-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-q2nzb\" (UID: \"3e2033c4-8ecc-485d-90bb-962e6ed7c237\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-q2nzb" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.992846 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/3e2033c4-8ecc-485d-90bb-962e6ed7c237-etc-cvo-updatepayloads\") pod 
\"cluster-version-operator-5c965bbfc6-q2nzb\" (UID: \"3e2033c4-8ecc-485d-90bb-962e6ed7c237\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-q2nzb" Jan 22 14:20:36 crc kubenswrapper[4948]: I0122 14:20:36.993574 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/3e2033c4-8ecc-485d-90bb-962e6ed7c237-service-ca\") pod \"cluster-version-operator-5c965bbfc6-q2nzb\" (UID: \"3e2033c4-8ecc-485d-90bb-962e6ed7c237\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-q2nzb" Jan 22 14:20:37 crc kubenswrapper[4948]: I0122 14:20:37.001699 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3e2033c4-8ecc-485d-90bb-962e6ed7c237-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-q2nzb\" (UID: \"3e2033c4-8ecc-485d-90bb-962e6ed7c237\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-q2nzb" Jan 22 14:20:37 crc kubenswrapper[4948]: I0122 14:20:37.012639 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-z6s4b" podStartSLOduration=76.012613844 podStartE2EDuration="1m16.012613844s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:20:36.984532514 +0000 UTC m=+95.314205493" watchObservedRunningTime="2026-01-22 14:20:37.012613844 +0000 UTC m=+95.342286783" Jan 22 14:20:37 crc kubenswrapper[4948]: I0122 14:20:37.016990 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3e2033c4-8ecc-485d-90bb-962e6ed7c237-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-q2nzb\" (UID: \"3e2033c4-8ecc-485d-90bb-962e6ed7c237\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-q2nzb" Jan 22 14:20:37 crc kubenswrapper[4948]: I0122 14:20:37.059542 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" podStartSLOduration=76.059516162 podStartE2EDuration="1m16.059516162s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:20:37.058661889 +0000 UTC m=+95.388334818" watchObservedRunningTime="2026-01-22 14:20:37.059516162 +0000 UTC m=+95.389189131" Jan 22 14:20:37 crc kubenswrapper[4948]: I0122 14:20:37.087827 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-jwlht" podStartSLOduration=76.087790617 podStartE2EDuration="1m16.087790617s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:20:37.07276428 +0000 UTC m=+95.402437229" watchObservedRunningTime="2026-01-22 14:20:37.087790617 +0000 UTC m=+95.417463556" Jan 22 14:20:37 crc kubenswrapper[4948]: I0122 14:20:37.117762 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-q2nzb" Jan 22 14:20:37 crc kubenswrapper[4948]: I0122 14:20:37.132451 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-wxjm7" podStartSLOduration=75.132427244 podStartE2EDuration="1m15.132427244s" podCreationTimestamp="2026-01-22 14:19:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:20:37.131859788 +0000 UTC m=+95.461532757" watchObservedRunningTime="2026-01-22 14:20:37.132427244 +0000 UTC m=+95.462100173" Jan 22 14:20:37 crc kubenswrapper[4948]: W0122 14:20:37.152441 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3e2033c4_8ecc_485d_90bb_962e6ed7c237.slice/crio-f6fdf8413ab33561e5a416fa5f00fa327549e0e3a4613a5dd686b81697d90c49 WatchSource:0}: Error finding container f6fdf8413ab33561e5a416fa5f00fa327549e0e3a4613a5dd686b81697d90c49: Status 404 returned error can't find the container with id f6fdf8413ab33561e5a416fa5f00fa327549e0e3a4613a5dd686b81697d90c49 Jan 22 14:20:37 crc kubenswrapper[4948]: I0122 14:20:37.546639 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-q2nzb" event={"ID":"3e2033c4-8ecc-485d-90bb-962e6ed7c237","Type":"ContainerStarted","Data":"68912cc93844baa76d5c164aca0e45cc74751e9b1088090b268bfc896d2adb58"} Jan 22 14:20:37 crc kubenswrapper[4948]: I0122 14:20:37.547097 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-q2nzb" event={"ID":"3e2033c4-8ecc-485d-90bb-962e6ed7c237","Type":"ContainerStarted","Data":"f6fdf8413ab33561e5a416fa5f00fa327549e0e3a4613a5dd686b81697d90c49"} Jan 22 14:20:37 crc kubenswrapper[4948]: I0122 14:20:37.559948 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-q2nzb" podStartSLOduration=76.559929913 podStartE2EDuration="1m16.559929913s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:20:37.559704627 +0000 UTC m=+95.889377566" watchObservedRunningTime="2026-01-22 14:20:37.559929913 +0000 UTC m=+95.889602842" Jan 22 14:20:37 crc kubenswrapper[4948]: I0122 14:20:37.956793 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:20:37 crc kubenswrapper[4948]: I0122 14:20:37.956835 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:37 crc kubenswrapper[4948]: E0122 14:20:37.956946 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:20:37 crc kubenswrapper[4948]: I0122 14:20:37.956801 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:20:37 crc kubenswrapper[4948]: E0122 14:20:37.957046 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:20:37 crc kubenswrapper[4948]: E0122 14:20:37.957255 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:20:38 crc kubenswrapper[4948]: I0122 14:20:38.956949 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:38 crc kubenswrapper[4948]: E0122 14:20:38.957283 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:20:39 crc kubenswrapper[4948]: I0122 14:20:39.958357 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:20:39 crc kubenswrapper[4948]: I0122 14:20:39.958367 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:20:39 crc kubenswrapper[4948]: I0122 14:20:39.958456 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:39 crc kubenswrapper[4948]: E0122 14:20:39.958738 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:20:39 crc kubenswrapper[4948]: E0122 14:20:39.958854 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:20:39 crc kubenswrapper[4948]: E0122 14:20:39.958929 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:20:40 crc kubenswrapper[4948]: I0122 14:20:40.938159 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1280bb23-c283-4970-a78b-6de7854469ac-metrics-certs\") pod \"network-metrics-daemon-fxccj\" (UID: \"1280bb23-c283-4970-a78b-6de7854469ac\") " pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:40 crc kubenswrapper[4948]: E0122 14:20:40.938304 4948 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:20:40 crc kubenswrapper[4948]: E0122 14:20:40.938378 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/1280bb23-c283-4970-a78b-6de7854469ac-metrics-certs podName:1280bb23-c283-4970-a78b-6de7854469ac nodeName:}" failed. No retries permitted until 2026-01-22 14:21:44.938359784 +0000 UTC m=+163.268032723 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/1280bb23-c283-4970-a78b-6de7854469ac-metrics-certs") pod "network-metrics-daemon-fxccj" (UID: "1280bb23-c283-4970-a78b-6de7854469ac") : object "openshift-multus"/"metrics-daemon-secret" not registered Jan 22 14:20:40 crc kubenswrapper[4948]: I0122 14:20:40.956761 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:40 crc kubenswrapper[4948]: E0122 14:20:40.956969 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:20:40 crc kubenswrapper[4948]: I0122 14:20:40.958077 4948 scope.go:117] "RemoveContainer" containerID="39986e0fef791ac8fb51e96c9f7bac60fcf8c3672361aad41521f3aece660aa1" Jan 22 14:20:40 crc kubenswrapper[4948]: E0122 14:20:40.958373 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-c5294_openshift-ovn-kubernetes(937749c1-3c7a-4f48-b65a-84b1335d64b6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" podUID="937749c1-3c7a-4f48-b65a-84b1335d64b6" Jan 22 14:20:41 crc kubenswrapper[4948]: I0122 14:20:41.956312 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:41 crc kubenswrapper[4948]: I0122 14:20:41.956308 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:20:41 crc kubenswrapper[4948]: I0122 14:20:41.956307 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:20:41 crc kubenswrapper[4948]: E0122 14:20:41.957482 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:20:41 crc kubenswrapper[4948]: E0122 14:20:41.957552 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:20:41 crc kubenswrapper[4948]: E0122 14:20:41.957632 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:20:42 crc kubenswrapper[4948]: I0122 14:20:42.956297 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:42 crc kubenswrapper[4948]: E0122 14:20:42.956747 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:20:43 crc kubenswrapper[4948]: I0122 14:20:43.957165 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:20:43 crc kubenswrapper[4948]: I0122 14:20:43.957298 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:20:43 crc kubenswrapper[4948]: I0122 14:20:43.957298 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:43 crc kubenswrapper[4948]: E0122 14:20:43.957775 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:20:43 crc kubenswrapper[4948]: E0122 14:20:43.957934 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:20:43 crc kubenswrapper[4948]: E0122 14:20:43.958059 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:20:44 crc kubenswrapper[4948]: I0122 14:20:44.956907 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:44 crc kubenswrapper[4948]: E0122 14:20:44.957130 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:20:45 crc kubenswrapper[4948]: I0122 14:20:45.956658 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:20:45 crc kubenswrapper[4948]: E0122 14:20:45.956830 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:20:45 crc kubenswrapper[4948]: I0122 14:20:45.957123 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:20:45 crc kubenswrapper[4948]: E0122 14:20:45.957194 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:20:45 crc kubenswrapper[4948]: I0122 14:20:45.957459 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:45 crc kubenswrapper[4948]: E0122 14:20:45.957738 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:20:46 crc kubenswrapper[4948]: I0122 14:20:46.956479 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:46 crc kubenswrapper[4948]: E0122 14:20:46.956621 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:20:47 crc kubenswrapper[4948]: I0122 14:20:47.956971 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:20:47 crc kubenswrapper[4948]: I0122 14:20:47.957158 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:47 crc kubenswrapper[4948]: I0122 14:20:47.956966 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:20:47 crc kubenswrapper[4948]: E0122 14:20:47.957259 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:20:47 crc kubenswrapper[4948]: E0122 14:20:47.957458 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:20:47 crc kubenswrapper[4948]: E0122 14:20:47.957623 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:20:48 crc kubenswrapper[4948]: I0122 14:20:48.956567 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:48 crc kubenswrapper[4948]: E0122 14:20:48.956701 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:20:49 crc kubenswrapper[4948]: I0122 14:20:49.957275 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:20:49 crc kubenswrapper[4948]: I0122 14:20:49.957301 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:20:49 crc kubenswrapper[4948]: I0122 14:20:49.957357 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:49 crc kubenswrapper[4948]: E0122 14:20:49.958313 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:20:49 crc kubenswrapper[4948]: E0122 14:20:49.958438 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:20:49 crc kubenswrapper[4948]: E0122 14:20:49.958567 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:20:50 crc kubenswrapper[4948]: I0122 14:20:50.956401 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:50 crc kubenswrapper[4948]: E0122 14:20:50.956522 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:20:51 crc kubenswrapper[4948]: I0122 14:20:51.956547 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:51 crc kubenswrapper[4948]: I0122 14:20:51.958900 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:20:51 crc kubenswrapper[4948]: E0122 14:20:51.959242 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:20:51 crc kubenswrapper[4948]: I0122 14:20:51.959673 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:20:51 crc kubenswrapper[4948]: E0122 14:20:51.960090 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:20:51 crc kubenswrapper[4948]: E0122 14:20:51.960542 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:20:51 crc kubenswrapper[4948]: I0122 14:20:51.961235 4948 scope.go:117] "RemoveContainer" containerID="39986e0fef791ac8fb51e96c9f7bac60fcf8c3672361aad41521f3aece660aa1" Jan 22 14:20:51 crc kubenswrapper[4948]: E0122 14:20:51.961571 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-c5294_openshift-ovn-kubernetes(937749c1-3c7a-4f48-b65a-84b1335d64b6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" podUID="937749c1-3c7a-4f48-b65a-84b1335d64b6" Jan 22 14:20:52 crc kubenswrapper[4948]: I0122 14:20:52.956775 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:52 crc kubenswrapper[4948]: E0122 14:20:52.956966 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:20:53 crc kubenswrapper[4948]: I0122 14:20:53.957069 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:20:53 crc kubenswrapper[4948]: I0122 14:20:53.957156 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:20:53 crc kubenswrapper[4948]: E0122 14:20:53.957227 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:20:53 crc kubenswrapper[4948]: E0122 14:20:53.957417 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:20:53 crc kubenswrapper[4948]: I0122 14:20:53.957071 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:53 crc kubenswrapper[4948]: E0122 14:20:53.957623 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:20:54 crc kubenswrapper[4948]: I0122 14:20:54.613978 4948 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jwlht_feadf51a-0802-4335-b035-fe290bdd3960/kube-multus/1.log" Jan 22 14:20:54 crc kubenswrapper[4948]: I0122 14:20:54.614553 4948 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jwlht_feadf51a-0802-4335-b035-fe290bdd3960/kube-multus/0.log" Jan 22 14:20:54 crc kubenswrapper[4948]: I0122 14:20:54.614605 4948 generic.go:334] "Generic (PLEG): container finished" podID="feadf51a-0802-4335-b035-fe290bdd3960" containerID="383602c28d40976a941bc8e2f079879a9e8b7d668b994058bd005455eaf3260b" exitCode=1 Jan 22 14:20:54 crc kubenswrapper[4948]: I0122 14:20:54.614637 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jwlht" event={"ID":"feadf51a-0802-4335-b035-fe290bdd3960","Type":"ContainerDied","Data":"383602c28d40976a941bc8e2f079879a9e8b7d668b994058bd005455eaf3260b"} Jan 22 14:20:54 crc kubenswrapper[4948]: I0122 14:20:54.614671 4948 scope.go:117] "RemoveContainer" containerID="ad7518603f4f92f9577af66edbe334169e1145b1be7cc7cec022800ba3e08c38" Jan 22 14:20:54 crc kubenswrapper[4948]: I0122 14:20:54.615532 4948 scope.go:117] "RemoveContainer" containerID="383602c28d40976a941bc8e2f079879a9e8b7d668b994058bd005455eaf3260b" Jan 22 14:20:54 crc kubenswrapper[4948]: E0122 14:20:54.616005 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-jwlht_openshift-multus(feadf51a-0802-4335-b035-fe290bdd3960)\"" pod="openshift-multus/multus-jwlht" podUID="feadf51a-0802-4335-b035-fe290bdd3960" Jan 22 14:20:54 crc kubenswrapper[4948]: I0122 
14:20:54.956430 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:54 crc kubenswrapper[4948]: E0122 14:20:54.956567 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:20:55 crc kubenswrapper[4948]: I0122 14:20:55.621649 4948 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jwlht_feadf51a-0802-4335-b035-fe290bdd3960/kube-multus/1.log" Jan 22 14:20:55 crc kubenswrapper[4948]: I0122 14:20:55.957001 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:20:55 crc kubenswrapper[4948]: I0122 14:20:55.957095 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:55 crc kubenswrapper[4948]: I0122 14:20:55.957063 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:20:55 crc kubenswrapper[4948]: E0122 14:20:55.957232 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:20:55 crc kubenswrapper[4948]: E0122 14:20:55.957390 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:20:55 crc kubenswrapper[4948]: E0122 14:20:55.957643 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:20:56 crc kubenswrapper[4948]: I0122 14:20:56.956881 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:56 crc kubenswrapper[4948]: E0122 14:20:56.957135 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:20:57 crc kubenswrapper[4948]: I0122 14:20:57.956731 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:20:57 crc kubenswrapper[4948]: I0122 14:20:57.956818 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:20:57 crc kubenswrapper[4948]: I0122 14:20:57.956871 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:57 crc kubenswrapper[4948]: E0122 14:20:57.957147 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:20:57 crc kubenswrapper[4948]: E0122 14:20:57.957283 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:20:57 crc kubenswrapper[4948]: E0122 14:20:57.957619 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:20:58 crc kubenswrapper[4948]: I0122 14:20:58.957121 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:20:58 crc kubenswrapper[4948]: E0122 14:20:58.957345 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:20:59 crc kubenswrapper[4948]: I0122 14:20:59.957433 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:20:59 crc kubenswrapper[4948]: I0122 14:20:59.957433 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:20:59 crc kubenswrapper[4948]: E0122 14:20:59.957726 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:20:59 crc kubenswrapper[4948]: I0122 14:20:59.957422 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:20:59 crc kubenswrapper[4948]: E0122 14:20:59.957983 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:20:59 crc kubenswrapper[4948]: E0122 14:20:59.958141 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:21:00 crc kubenswrapper[4948]: I0122 14:21:00.957262 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:21:00 crc kubenswrapper[4948]: E0122 14:21:00.957448 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:21:01 crc kubenswrapper[4948]: I0122 14:21:01.956567 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:21:01 crc kubenswrapper[4948]: I0122 14:21:01.956606 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:21:01 crc kubenswrapper[4948]: I0122 14:21:01.956661 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:21:01 crc kubenswrapper[4948]: E0122 14:21:01.963066 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:21:01 crc kubenswrapper[4948]: E0122 14:21:01.963267 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:21:01 crc kubenswrapper[4948]: E0122 14:21:01.963472 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:21:01 crc kubenswrapper[4948]: E0122 14:21:01.980353 4948 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Jan 22 14:21:02 crc kubenswrapper[4948]: E0122 14:21:02.114152 4948 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 22 14:21:02 crc kubenswrapper[4948]: I0122 14:21:02.956534 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:21:02 crc kubenswrapper[4948]: E0122 14:21:02.956746 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:21:03 crc kubenswrapper[4948]: I0122 14:21:03.957114 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:21:03 crc kubenswrapper[4948]: I0122 14:21:03.957133 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:21:03 crc kubenswrapper[4948]: I0122 14:21:03.957162 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:21:03 crc kubenswrapper[4948]: E0122 14:21:03.957263 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:21:03 crc kubenswrapper[4948]: E0122 14:21:03.957318 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:21:03 crc kubenswrapper[4948]: E0122 14:21:03.957389 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:21:04 crc kubenswrapper[4948]: I0122 14:21:04.956183 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:21:04 crc kubenswrapper[4948]: E0122 14:21:04.956324 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:21:05 crc kubenswrapper[4948]: I0122 14:21:05.956850 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:21:05 crc kubenswrapper[4948]: I0122 14:21:05.956850 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:21:05 crc kubenswrapper[4948]: E0122 14:21:05.956996 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:21:05 crc kubenswrapper[4948]: E0122 14:21:05.957085 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:21:05 crc kubenswrapper[4948]: I0122 14:21:05.957596 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:21:05 crc kubenswrapper[4948]: E0122 14:21:05.957717 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:21:06 crc kubenswrapper[4948]: I0122 14:21:06.956694 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:21:06 crc kubenswrapper[4948]: E0122 14:21:06.957136 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:21:06 crc kubenswrapper[4948]: I0122 14:21:06.957405 4948 scope.go:117] "RemoveContainer" containerID="39986e0fef791ac8fb51e96c9f7bac60fcf8c3672361aad41521f3aece660aa1" Jan 22 14:21:07 crc kubenswrapper[4948]: E0122 14:21:07.115958 4948 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Jan 22 14:21:07 crc kubenswrapper[4948]: I0122 14:21:07.671594 4948 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-c5294_937749c1-3c7a-4f48-b65a-84b1335d64b6/ovnkube-controller/3.log" Jan 22 14:21:07 crc kubenswrapper[4948]: I0122 14:21:07.674577 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" event={"ID":"937749c1-3c7a-4f48-b65a-84b1335d64b6","Type":"ContainerStarted","Data":"da8426dfb4b8d310c2f13ac798691912349b392ef45f337f2a72364601f3dfe4"} Jan 22 14:21:07 crc kubenswrapper[4948]: I0122 14:21:07.674991 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:21:07 crc kubenswrapper[4948]: I0122 14:21:07.704829 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" podStartSLOduration=106.704810612 podStartE2EDuration="1m46.704810612s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:07.704622457 +0000 UTC m=+126.034295386" watchObservedRunningTime="2026-01-22 14:21:07.704810612 +0000 UTC m=+126.034483541" Jan 22 14:21:07 crc kubenswrapper[4948]: I0122 14:21:07.958546 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:21:07 crc kubenswrapper[4948]: E0122 14:21:07.959093 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:21:07 crc kubenswrapper[4948]: I0122 14:21:07.958568 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:21:07 crc kubenswrapper[4948]: E0122 14:21:07.959178 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:21:07 crc kubenswrapper[4948]: I0122 14:21:07.958900 4948 scope.go:117] "RemoveContainer" containerID="383602c28d40976a941bc8e2f079879a9e8b7d668b994058bd005455eaf3260b" Jan 22 14:21:07 crc kubenswrapper[4948]: I0122 14:21:07.958921 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:21:07 crc kubenswrapper[4948]: E0122 14:21:07.959320 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:21:07 crc kubenswrapper[4948]: I0122 14:21:07.969777 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-fxccj"] Jan 22 14:21:07 crc kubenswrapper[4948]: I0122 14:21:07.969879 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:21:07 crc kubenswrapper[4948]: E0122 14:21:07.970004 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:21:08 crc kubenswrapper[4948]: I0122 14:21:08.680682 4948 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-jwlht_feadf51a-0802-4335-b035-fe290bdd3960/kube-multus/1.log" Jan 22 14:21:08 crc kubenswrapper[4948]: I0122 14:21:08.680833 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-jwlht" event={"ID":"feadf51a-0802-4335-b035-fe290bdd3960","Type":"ContainerStarted","Data":"0b0c47fb983c5c68cde820745c3544267679ae222116e10f419365a6cad3e2ea"} Jan 22 14:21:09 crc kubenswrapper[4948]: I0122 14:21:09.957047 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:21:09 crc kubenswrapper[4948]: I0122 14:21:09.957208 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:21:09 crc kubenswrapper[4948]: E0122 14:21:09.957747 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:21:09 crc kubenswrapper[4948]: I0122 14:21:09.957238 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:21:09 crc kubenswrapper[4948]: I0122 14:21:09.957215 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:21:09 crc kubenswrapper[4948]: E0122 14:21:09.957905 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:21:09 crc kubenswrapper[4948]: E0122 14:21:09.957972 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:21:09 crc kubenswrapper[4948]: E0122 14:21:09.958093 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:21:11 crc kubenswrapper[4948]: I0122 14:21:11.957154 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:21:11 crc kubenswrapper[4948]: I0122 14:21:11.957252 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:21:11 crc kubenswrapper[4948]: I0122 14:21:11.957256 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:21:11 crc kubenswrapper[4948]: E0122 14:21:11.958353 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Jan 22 14:21:11 crc kubenswrapper[4948]: I0122 14:21:11.958380 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:21:11 crc kubenswrapper[4948]: E0122 14:21:11.958536 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Jan 22 14:21:11 crc kubenswrapper[4948]: E0122 14:21:11.958721 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Jan 22 14:21:11 crc kubenswrapper[4948]: E0122 14:21:11.958863 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fxccj" podUID="1280bb23-c283-4970-a78b-6de7854469ac" Jan 22 14:21:13 crc kubenswrapper[4948]: I0122 14:21:13.956370 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:21:13 crc kubenswrapper[4948]: I0122 14:21:13.956391 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:21:13 crc kubenswrapper[4948]: I0122 14:21:13.956368 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:21:13 crc kubenswrapper[4948]: I0122 14:21:13.956592 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:21:13 crc kubenswrapper[4948]: I0122 14:21:13.958411 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 22 14:21:13 crc kubenswrapper[4948]: I0122 14:21:13.959181 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 22 14:21:13 crc kubenswrapper[4948]: I0122 14:21:13.959416 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 22 14:21:13 crc kubenswrapper[4948]: I0122 14:21:13.959536 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 22 14:21:13 crc kubenswrapper[4948]: I0122 14:21:13.959661 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 22 14:21:13 crc kubenswrapper[4948]: I0122 14:21:13.962425 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 22 14:21:14 crc kubenswrapper[4948]: I0122 14:21:14.333810 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-c5294" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.122344 4948 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.163005 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-4mvfm"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.167537 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-4mvfm" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.168599 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-lkpqv"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.172369 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.172827 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.173150 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.173233 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.173372 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.173912 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.178180 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.178526 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.178779 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.178982 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.183604 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.183668 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zfs47"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.187778 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-bbmbn"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.188585 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-bbmbn" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.188765 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-qxjql"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.189255 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-qxjql" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.191381 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zfs47" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.191944 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.192085 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.192261 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.194754 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.194878 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.195280 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.195284 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-974mb"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.196070 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pz6k6"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.196592 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-974mb" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.197060 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pz6k6" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.197550 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.200792 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.201291 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-6whzf"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.202211 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-6whzf" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.202936 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zq2mw"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.203459 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.205081 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sv2zr"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.205518 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qxj2h"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.205836 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qxj2h" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.206127 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sv2zr" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.210574 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-ps89b"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.211274 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.211511 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.211680 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.212098 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.212141 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zx2x2"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.212325 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.212362 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-l9pc7"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.212449 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.212778 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l9pc7" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.212807 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ps89b" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.213320 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.213380 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zx2x2" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.213668 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.214246 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.214318 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.216092 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9f5vd"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.216673 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9f5vd" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.220860 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.221183 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.222383 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.222558 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-cjtzs"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.226881 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.227187 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.227244 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.236682 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fkltl"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.238173 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fkltl" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.238472 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484855-52bdf"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.239162 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-cjtzs" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.240768 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-52bdf" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.237252 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.237562 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.238532 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.242467 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.246755 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.246906 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.247064 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.247235 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.248231 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.250371 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.252039 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.252316 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.252405 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5jkws"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.253734 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5jkws" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.262098 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.262539 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.262824 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.262998 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.263449 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.264093 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.264380 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.264612 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.264815 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.265043 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.265254 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.266442 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.266516 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.266551 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.266578 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.266646 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.276081 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.276309 4948 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.276451 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.276569 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.266676 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.266713 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.266701 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.266770 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.266796 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.266856 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.266863 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.277108 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-config\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.266917 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.277139 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2c0d1d9-76f0-45bb-8a15-88a0876d5b76-config\") pod \"machine-api-operator-5694c8668f-bbmbn\" (UID: \"f2c0d1d9-76f0-45bb-8a15-88a0876d5b76\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bbmbn" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.277157 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f2c0d1d9-76f0-45bb-8a15-88a0876d5b76-images\") pod \"machine-api-operator-5694c8668f-bbmbn\" (UID: \"f2c0d1d9-76f0-45bb-8a15-88a0876d5b76\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bbmbn" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.277173 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-etcd-client\") pod \"apiserver-76f77b778f-lkpqv\" 
(UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.266959 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.277195 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6j7zl"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.267005 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.267051 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.267084 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.267161 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.268263 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.277681 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.278235 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.278757 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.279453 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.279601 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.279856 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.280040 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.280173 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.280335 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.281337 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.277203 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-fznpl\" (UniqueName: \"kubernetes.io/projected/78f96e55-8d97-4d74-8a95-44356bb1c0b1-kube-api-access-fznpl\") pod \"openshift-config-operator-7777fb866f-974mb\" (UID: \"78f96e55-8d97-4d74-8a95-44356bb1c0b1\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-974mb" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.282705 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/deeb21d3-29ae-4db0-a982-461ee3d6cd4b-config\") pod \"etcd-operator-b45778765-qxjql\" (UID: \"deeb21d3-29ae-4db0-a982-461ee3d6cd4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qxjql" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.282735 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-etcd-serving-ca\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.282772 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bd18b7ad-276e-4008-85c7-e4680c574d27-serving-cert\") pod \"console-operator-58897d9998-4mvfm\" (UID: \"bd18b7ad-276e-4008-85c7-e4680c574d27\") " pod="openshift-console-operator/console-operator-58897d9998-4mvfm" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.282796 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7ba1d16e-01d0-42f5-b3ef-9196a8a11197-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-pz6k6\" (UID: \"7ba1d16e-01d0-42f5-b3ef-9196a8a11197\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pz6k6" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.282819 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bv25s\" (UniqueName: \"kubernetes.io/projected/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-kube-api-access-bv25s\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.282847 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/deeb21d3-29ae-4db0-a982-461ee3d6cd4b-etcd-client\") pod \"etcd-operator-b45778765-qxjql\" (UID: \"deeb21d3-29ae-4db0-a982-461ee3d6cd4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qxjql" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.282871 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/deeb21d3-29ae-4db0-a982-461ee3d6cd4b-serving-cert\") pod \"etcd-operator-b45778765-qxjql\" (UID: \"deeb21d3-29ae-4db0-a982-461ee3d6cd4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qxjql" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.282892 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2l4h\" (UniqueName: 
\"kubernetes.io/projected/7ba1d16e-01d0-42f5-b3ef-9196a8a11197-kube-api-access-g2l4h\") pod \"openshift-controller-manager-operator-756b6f6bc6-pz6k6\" (UID: \"7ba1d16e-01d0-42f5-b3ef-9196a8a11197\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pz6k6" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.282913 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-image-import-ca\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.282952 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a92e7d6-836a-4227-9ee8-3db4353fdfa1-config\") pod \"openshift-apiserver-operator-796bbdcf4f-zfs47\" (UID: \"1a92e7d6-836a-4227-9ee8-3db4353fdfa1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zfs47" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.282989 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mwn2r\" (UniqueName: \"kubernetes.io/projected/bd18b7ad-276e-4008-85c7-e4680c574d27-kube-api-access-mwn2r\") pod \"console-operator-58897d9998-4mvfm\" (UID: \"bd18b7ad-276e-4008-85c7-e4680c574d27\") " pod="openshift-console-operator/console-operator-58897d9998-4mvfm" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.283059 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bd18b7ad-276e-4008-85c7-e4680c574d27-trusted-ca\") pod \"console-operator-58897d9998-4mvfm\" (UID: \"bd18b7ad-276e-4008-85c7-e4680c574d27\") " pod="openshift-console-operator/console-operator-58897d9998-4mvfm" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.283094 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78f96e55-8d97-4d74-8a95-44356bb1c0b1-serving-cert\") pod \"openshift-config-operator-7777fb866f-974mb\" (UID: \"78f96e55-8d97-4d74-8a95-44356bb1c0b1\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-974mb" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.283139 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/deeb21d3-29ae-4db0-a982-461ee3d6cd4b-etcd-service-ca\") pod \"etcd-operator-b45778765-qxjql\" (UID: \"deeb21d3-29ae-4db0-a982-461ee3d6cd4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qxjql" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.283160 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-audit\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.283181 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-serving-cert\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.283220 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/deeb21d3-29ae-4db0-a982-461ee3d6cd4b-etcd-ca\") pod \"etcd-operator-b45778765-qxjql\" (UID: \"deeb21d3-29ae-4db0-a982-461ee3d6cd4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qxjql" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.283251 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kclqr\" (UniqueName: \"kubernetes.io/projected/f2c0d1d9-76f0-45bb-8a15-88a0876d5b76-kube-api-access-kclqr\") pod \"machine-api-operator-5694c8668f-bbmbn\" (UID: \"f2c0d1d9-76f0-45bb-8a15-88a0876d5b76\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bbmbn" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.283271 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-audit-dir\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.283300 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8g22j\" (UniqueName: \"kubernetes.io/projected/1a92e7d6-836a-4227-9ee8-3db4353fdfa1-kube-api-access-8g22j\") pod \"openshift-apiserver-operator-796bbdcf4f-zfs47\" (UID: \"1a92e7d6-836a-4227-9ee8-3db4353fdfa1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zfs47" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.283329 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-trusted-ca-bundle\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.283352 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a92e7d6-836a-4227-9ee8-3db4353fdfa1-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-zfs47\" (UID: \"1a92e7d6-836a-4227-9ee8-3db4353fdfa1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zfs47" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.283373 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-node-pullsecrets\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.283400 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/78f96e55-8d97-4d74-8a95-44356bb1c0b1-available-featuregates\") pod 
\"openshift-config-operator-7777fb866f-974mb\" (UID: \"78f96e55-8d97-4d74-8a95-44356bb1c0b1\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-974mb" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.283422 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dpx2\" (UniqueName: \"kubernetes.io/projected/deeb21d3-29ae-4db0-a982-461ee3d6cd4b-kube-api-access-4dpx2\") pod \"etcd-operator-b45778765-qxjql\" (UID: \"deeb21d3-29ae-4db0-a982-461ee3d6cd4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qxjql" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.283462 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd18b7ad-276e-4008-85c7-e4680c574d27-config\") pod \"console-operator-58897d9998-4mvfm\" (UID: \"bd18b7ad-276e-4008-85c7-e4680c574d27\") " pod="openshift-console-operator/console-operator-58897d9998-4mvfm" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.283485 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ba1d16e-01d0-42f5-b3ef-9196a8a11197-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-pz6k6\" (UID: \"7ba1d16e-01d0-42f5-b3ef-9196a8a11197\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pz6k6" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.283506 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-encryption-config\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.283531 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/f2c0d1d9-76f0-45bb-8a15-88a0876d5b76-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-bbmbn\" (UID: \"f2c0d1d9-76f0-45bb-8a15-88a0876d5b76\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bbmbn" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.291737 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-lkpqv"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.291775 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xf6qs"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.292573 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.293737 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-x5qwp"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.293763 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.295120 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6j7zl" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.295581 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-xf6qs" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.298501 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x5qwp" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.299056 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.299425 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.301773 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-jwzjg"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.302666 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-s4jms"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.302689 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.303202 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-s4jms" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.303226 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-jwzjg" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.303932 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkbsc"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.304590 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkbsc" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.309123 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.309223 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.309351 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.309503 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.309882 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.310053 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.310192 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.310303 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.310409 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6bb9s"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.311134 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6bb9s" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.312064 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.315543 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-lt5mv"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.321303 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h9sz7"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.321868 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-qjjhb"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.329219 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lt5mv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.329740 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h9sz7" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.329912 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.332090 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-g5zjf"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.333872 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-qjjhb" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.334365 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-5mc8q"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.334579 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.335770 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-g5zjf" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.336172 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zfs47"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.336472 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.352990 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.354641 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.355361 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.357040 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f4955"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.360568 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f4955" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.361378 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-ln6zk"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.362064 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ln6zk" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.362930 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.362961 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-7kdkq"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.363552 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-7kdkq" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.363920 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7cjtc"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.364415 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-7cjtc" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.364944 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-xgx8j"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.365700 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-xgx8j" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.366739 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-4mvfm"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.368465 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-bbmbn"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.368976 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-qxjql"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.370392 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-974mb"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.371483 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pz6k6"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.372198 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.372659 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-6whzf"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.373657 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-28tff"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.374370 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-28tff" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.374669 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-t6lsv"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.375173 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-canary/ingress-canary-t6lsv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.375739 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9f5vd"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.376671 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-cjtzs"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.377675 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sv2zr"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.378642 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484855-52bdf"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.379738 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qxj2h"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.382951 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fkltl"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.383029 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-ps89b"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.383040 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5jkws"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.384557 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f9164f3-da46-447e-94fd-ffda5fb2f565-config\") pod \"machine-approver-56656f9798-x5qwp\" (UID: \"6f9164f3-da46-447e-94fd-ffda5fb2f565\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x5qwp" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.384589 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5xgps\" (UniqueName: \"kubernetes.io/projected/1c262802-72e7-4bc1-92bd-bf8b5551cade-kube-api-access-5xgps\") pod \"machine-config-controller-84d6567774-ps89b\" (UID: \"1c262802-72e7-4bc1-92bd-bf8b5551cade\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ps89b" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.384609 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3722b0ae-dab1-4301-b98e-de27b27ab754-secret-volume\") pod \"collect-profiles-29484855-52bdf\" (UID: \"3722b0ae-dab1-4301-b98e-de27b27ab754\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-52bdf" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.384630 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mwn2r\" (UniqueName: \"kubernetes.io/projected/bd18b7ad-276e-4008-85c7-e4680c574d27-kube-api-access-mwn2r\") pod \"console-operator-58897d9998-4mvfm\" (UID: \"bd18b7ad-276e-4008-85c7-e4680c574d27\") " pod="openshift-console-operator/console-operator-58897d9998-4mvfm" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.384647 4948 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.384666 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ca154693-ba02-4d7b-a4b6-45fee35a0280-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-cjtzs\" (UID: \"ca154693-ba02-4d7b-a4b6-45fee35a0280\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-cjtzs" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.384682 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.384697 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ce5de36e-7ed5-42bb-bf9d-28bf32c76edf-auth-proxy-config\") pod \"machine-config-operator-74547568cd-l9pc7\" (UID: \"ce5de36e-7ed5-42bb-bf9d-28bf32c76edf\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l9pc7" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.384713 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bd18b7ad-276e-4008-85c7-e4680c574d27-trusted-ca\") pod \"console-operator-58897d9998-4mvfm\" (UID: \"bd18b7ad-276e-4008-85c7-e4680c574d27\") " pod="openshift-console-operator/console-operator-58897d9998-4mvfm" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.384731 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78f96e55-8d97-4d74-8a95-44356bb1c0b1-serving-cert\") pod \"openshift-config-operator-7777fb866f-974mb\" (UID: \"78f96e55-8d97-4d74-8a95-44356bb1c0b1\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-974mb" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.384753 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/336a185f-a030-45f8-93f9-be33304506e1-config\") pod \"kube-apiserver-operator-766d6c64bb-qxj2h\" (UID: \"336a185f-a030-45f8-93f9-be33304506e1\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qxj2h" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.384779 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/deeb21d3-29ae-4db0-a982-461ee3d6cd4b-etcd-service-ca\") pod \"etcd-operator-b45778765-qxjql\" (UID: \"deeb21d3-29ae-4db0-a982-461ee3d6cd4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qxjql" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.384794 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"audit\" (UniqueName: \"kubernetes.io/configmap/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-audit\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.384810 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-serving-cert\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.384825 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a5509fc-2393-4932-be1c-6b9379065100-config\") pod \"kube-controller-manager-operator-78b949d7b-sv2zr\" (UID: \"0a5509fc-2393-4932-be1c-6b9379065100\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sv2zr" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.384850 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1c262802-72e7-4bc1-92bd-bf8b5551cade-proxy-tls\") pod \"machine-config-controller-84d6567774-ps89b\" (UID: \"1c262802-72e7-4bc1-92bd-bf8b5551cade\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ps89b" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.384866 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.384881 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.384897 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1cb99e0f-ec79-4e50-9284-f161cfe70a93-etcd-client\") pod \"apiserver-7bbb656c7d-v7qjh\" (UID: \"1cb99e0f-ec79-4e50-9284-f161cfe70a93\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.384914 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.384931 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: 
\"kubernetes.io/secret/33ae6fda-0f24-436c-b813-e01bb556110e-webhook-cert\") pod \"packageserver-d55dfcdfc-6j7zl\" (UID: \"33ae6fda-0f24-436c-b813-e01bb556110e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6j7zl" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.384953 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kclqr\" (UniqueName: \"kubernetes.io/projected/f2c0d1d9-76f0-45bb-8a15-88a0876d5b76-kube-api-access-kclqr\") pod \"machine-api-operator-5694c8668f-bbmbn\" (UID: \"f2c0d1d9-76f0-45bb-8a15-88a0876d5b76\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bbmbn" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.384969 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/deeb21d3-29ae-4db0-a982-461ee3d6cd4b-etcd-ca\") pod \"etcd-operator-b45778765-qxjql\" (UID: \"deeb21d3-29ae-4db0-a982-461ee3d6cd4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qxjql" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.384989 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mbxdz\" (UniqueName: \"kubernetes.io/projected/1cb99e0f-ec79-4e50-9284-f161cfe70a93-kube-api-access-mbxdz\") pod \"apiserver-7bbb656c7d-v7qjh\" (UID: \"1cb99e0f-ec79-4e50-9284-f161cfe70a93\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385046 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-audit-dir\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385077 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0a5509fc-2393-4932-be1c-6b9379065100-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-sv2zr\" (UID: \"0a5509fc-2393-4932-be1c-6b9379065100\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sv2zr" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385097 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385112 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hx5nz\" (UniqueName: \"kubernetes.io/projected/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-kube-api-access-hx5nz\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385127 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3722b0ae-dab1-4301-b98e-de27b27ab754-config-volume\") pod 
\"collect-profiles-29484855-52bdf\" (UID: \"3722b0ae-dab1-4301-b98e-de27b27ab754\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-52bdf" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385143 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/d6d99019-a94b-4736-b5f4-496d973e0bc8-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-9f5vd\" (UID: \"d6d99019-a94b-4736-b5f4-496d973e0bc8\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9f5vd" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385162 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2314b196-d44a-487b-b9db-e0396b172957-config\") pod \"controller-manager-879f6c89f-xf6qs\" (UID: \"2314b196-d44a-487b-b9db-e0396b172957\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xf6qs" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385180 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8g22j\" (UniqueName: \"kubernetes.io/projected/1a92e7d6-836a-4227-9ee8-3db4353fdfa1-kube-api-access-8g22j\") pod \"openshift-apiserver-operator-796bbdcf4f-zfs47\" (UID: \"1a92e7d6-836a-4227-9ee8-3db4353fdfa1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zfs47" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385201 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1cb99e0f-ec79-4e50-9284-f161cfe70a93-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-v7qjh\" (UID: \"1cb99e0f-ec79-4e50-9284-f161cfe70a93\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385219 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-trusted-ca-bundle\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385237 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ld58\" (UniqueName: \"kubernetes.io/projected/ce5de36e-7ed5-42bb-bf9d-28bf32c76edf-kube-api-access-8ld58\") pod \"machine-config-operator-74547568cd-l9pc7\" (UID: \"ce5de36e-7ed5-42bb-bf9d-28bf32c76edf\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l9pc7" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385255 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a316cc87-51ce-49fd-b81c-fab7d253dea6-metrics-tls\") pod \"dns-operator-744455d44c-6whzf\" (UID: \"a316cc87-51ce-49fd-b81c-fab7d253dea6\") " pod="openshift-dns-operator/dns-operator-744455d44c-6whzf" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385273 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x4fjj\" (UniqueName: \"kubernetes.io/projected/d6d99019-a94b-4736-b5f4-496d973e0bc8-kube-api-access-x4fjj\") pod 
\"control-plane-machine-set-operator-78cbb6b69f-9f5vd\" (UID: \"d6d99019-a94b-4736-b5f4-496d973e0bc8\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9f5vd" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385291 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-node-pullsecrets\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385310 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a92e7d6-836a-4227-9ee8-3db4353fdfa1-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-zfs47\" (UID: \"1a92e7d6-836a-4227-9ee8-3db4353fdfa1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zfs47" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385327 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-27wzl\" (UniqueName: \"kubernetes.io/projected/2314b196-d44a-487b-b9db-e0396b172957-kube-api-access-27wzl\") pod \"controller-manager-879f6c89f-xf6qs\" (UID: \"2314b196-d44a-487b-b9db-e0396b172957\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xf6qs" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385344 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1cb99e0f-ec79-4e50-9284-f161cfe70a93-encryption-config\") pod \"apiserver-7bbb656c7d-v7qjh\" (UID: \"1cb99e0f-ec79-4e50-9284-f161cfe70a93\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385361 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2314b196-d44a-487b-b9db-e0396b172957-client-ca\") pod \"controller-manager-879f6c89f-xf6qs\" (UID: \"2314b196-d44a-487b-b9db-e0396b172957\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xf6qs" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385380 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2314b196-d44a-487b-b9db-e0396b172957-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-xf6qs\" (UID: \"2314b196-d44a-487b-b9db-e0396b172957\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xf6qs" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385398 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxvh8\" (UniqueName: \"kubernetes.io/projected/fcb187b2-3022-41a1-a9da-0cb254b18c16-kube-api-access-qxvh8\") pod \"olm-operator-6b444d44fb-zx2x2\" (UID: \"fcb187b2-3022-41a1-a9da-0cb254b18c16\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zx2x2" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385413 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/33ae6fda-0f24-436c-b813-e01bb556110e-tmpfs\") pod \"packageserver-d55dfcdfc-6j7zl\" (UID: 
\"33ae6fda-0f24-436c-b813-e01bb556110e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6j7zl" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385440 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/78f96e55-8d97-4d74-8a95-44356bb1c0b1-available-featuregates\") pod \"openshift-config-operator-7777fb866f-974mb\" (UID: \"78f96e55-8d97-4d74-8a95-44356bb1c0b1\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-974mb" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385457 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dpx2\" (UniqueName: \"kubernetes.io/projected/deeb21d3-29ae-4db0-a982-461ee3d6cd4b-kube-api-access-4dpx2\") pod \"etcd-operator-b45778765-qxjql\" (UID: \"deeb21d3-29ae-4db0-a982-461ee3d6cd4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qxjql" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385474 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1cb99e0f-ec79-4e50-9284-f161cfe70a93-serving-cert\") pod \"apiserver-7bbb656c7d-v7qjh\" (UID: \"1cb99e0f-ec79-4e50-9284-f161cfe70a93\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385489 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1c262802-72e7-4bc1-92bd-bf8b5551cade-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-ps89b\" (UID: \"1c262802-72e7-4bc1-92bd-bf8b5551cade\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ps89b" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385506 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ce5de36e-7ed5-42bb-bf9d-28bf32c76edf-images\") pod \"machine-config-operator-74547568cd-l9pc7\" (UID: \"ce5de36e-7ed5-42bb-bf9d-28bf32c76edf\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l9pc7" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385525 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd18b7ad-276e-4008-85c7-e4680c574d27-config\") pod \"console-operator-58897d9998-4mvfm\" (UID: \"bd18b7ad-276e-4008-85c7-e4680c574d27\") " pod="openshift-console-operator/console-operator-58897d9998-4mvfm" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385543 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ba1d16e-01d0-42f5-b3ef-9196a8a11197-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-pz6k6\" (UID: \"7ba1d16e-01d0-42f5-b3ef-9196a8a11197\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pz6k6" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385559 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-encryption-config\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " 
pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385576 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/6f9164f3-da46-447e-94fd-ffda5fb2f565-machine-approver-tls\") pod \"machine-approver-56656f9798-x5qwp\" (UID: \"6f9164f3-da46-447e-94fd-ffda5fb2f565\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x5qwp" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385592 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/33ae6fda-0f24-436c-b813-e01bb556110e-apiservice-cert\") pod \"packageserver-d55dfcdfc-6j7zl\" (UID: \"33ae6fda-0f24-436c-b813-e01bb556110e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6j7zl" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385608 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/f2c0d1d9-76f0-45bb-8a15-88a0876d5b76-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-bbmbn\" (UID: \"f2c0d1d9-76f0-45bb-8a15-88a0876d5b76\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bbmbn" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385641 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1cb99e0f-ec79-4e50-9284-f161cfe70a93-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-v7qjh\" (UID: \"1cb99e0f-ec79-4e50-9284-f161cfe70a93\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385655 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385673 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g2wl6\" (UniqueName: \"kubernetes.io/projected/ca154693-ba02-4d7b-a4b6-45fee35a0280-kube-api-access-g2wl6\") pod \"multus-admission-controller-857f4d67dd-cjtzs\" (UID: \"ca154693-ba02-4d7b-a4b6-45fee35a0280\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-cjtzs" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385693 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385709 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-audit-policies\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: 
\"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385745 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/e1523dba-d641-467b-96eb-521434ddfa75-profile-collector-cert\") pod \"catalog-operator-68c6474976-5jkws\" (UID: \"e1523dba-d641-467b-96eb-521434ddfa75\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5jkws" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385761 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-config\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385775 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1cb99e0f-ec79-4e50-9284-f161cfe70a93-audit-dir\") pod \"apiserver-7bbb656c7d-v7qjh\" (UID: \"1cb99e0f-ec79-4e50-9284-f161cfe70a93\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385792 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/fcb187b2-3022-41a1-a9da-0cb254b18c16-srv-cert\") pod \"olm-operator-6b444d44fb-zx2x2\" (UID: \"fcb187b2-3022-41a1-a9da-0cb254b18c16\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zx2x2" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385817 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2c0d1d9-76f0-45bb-8a15-88a0876d5b76-config\") pod \"machine-api-operator-5694c8668f-bbmbn\" (UID: \"f2c0d1d9-76f0-45bb-8a15-88a0876d5b76\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bbmbn" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385838 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f2c0d1d9-76f0-45bb-8a15-88a0876d5b76-images\") pod \"machine-api-operator-5694c8668f-bbmbn\" (UID: \"f2c0d1d9-76f0-45bb-8a15-88a0876d5b76\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bbmbn" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385855 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-etcd-client\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385874 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/ce5de36e-7ed5-42bb-bf9d-28bf32c76edf-proxy-tls\") pod \"machine-config-operator-74547568cd-l9pc7\" (UID: \"ce5de36e-7ed5-42bb-bf9d-28bf32c76edf\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l9pc7" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385891 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-gxt4l\" (UniqueName: \"kubernetes.io/projected/33ae6fda-0f24-436c-b813-e01bb556110e-kube-api-access-gxt4l\") pod \"packageserver-d55dfcdfc-6j7zl\" (UID: \"33ae6fda-0f24-436c-b813-e01bb556110e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6j7zl" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385910 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385927 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385944 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385959 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mztqh\" (UniqueName: \"kubernetes.io/projected/e1523dba-d641-467b-96eb-521434ddfa75-kube-api-access-mztqh\") pod \"catalog-operator-68c6474976-5jkws\" (UID: \"e1523dba-d641-467b-96eb-521434ddfa75\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5jkws" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.385978 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fznpl\" (UniqueName: \"kubernetes.io/projected/78f96e55-8d97-4d74-8a95-44356bb1c0b1-kube-api-access-fznpl\") pod \"openshift-config-operator-7777fb866f-974mb\" (UID: \"78f96e55-8d97-4d74-8a95-44356bb1c0b1\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-974mb" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.386023 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/deeb21d3-29ae-4db0-a982-461ee3d6cd4b-config\") pod \"etcd-operator-b45778765-qxjql\" (UID: \"deeb21d3-29ae-4db0-a982-461ee3d6cd4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qxjql" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.386052 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-etcd-serving-ca\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.386093 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0a5509fc-2393-4932-be1c-6b9379065100-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-sv2zr\" (UID: \"0a5509fc-2393-4932-be1c-6b9379065100\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sv2zr" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.386939 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/deeb21d3-29ae-4db0-a982-461ee3d6cd4b-config\") pod \"etcd-operator-b45778765-qxjql\" (UID: \"deeb21d3-29ae-4db0-a982-461ee3d6cd4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qxjql" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.386961 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-etcd-serving-ca\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.387123 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-node-pullsecrets\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.387180 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6f9164f3-da46-447e-94fd-ffda5fb2f565-auth-proxy-config\") pod \"machine-approver-56656f9798-x5qwp\" (UID: \"6f9164f3-da46-447e-94fd-ffda5fb2f565\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x5qwp" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.387217 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/66ee3af7-a820-42da-8190-e84882cee1d4-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-fkltl\" (UID: \"66ee3af7-a820-42da-8190-e84882cee1d4\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fkltl" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.387242 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9f86h\" (UniqueName: \"kubernetes.io/projected/66ee3af7-a820-42da-8190-e84882cee1d4-kube-api-access-9f86h\") pod \"cluster-samples-operator-665b6dd947-fkltl\" (UID: \"66ee3af7-a820-42da-8190-e84882cee1d4\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fkltl" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.387261 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/1cb99e0f-ec79-4e50-9284-f161cfe70a93-audit-policies\") pod \"apiserver-7bbb656c7d-v7qjh\" (UID: \"1cb99e0f-ec79-4e50-9284-f161cfe70a93\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.387283 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/2314b196-d44a-487b-b9db-e0396b172957-serving-cert\") pod \"controller-manager-879f6c89f-xf6qs\" (UID: \"2314b196-d44a-487b-b9db-e0396b172957\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xf6qs" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.387304 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/e1523dba-d641-467b-96eb-521434ddfa75-srv-cert\") pod \"catalog-operator-68c6474976-5jkws\" (UID: \"e1523dba-d641-467b-96eb-521434ddfa75\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5jkws" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.387323 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-f8fjk\" (UniqueName: \"kubernetes.io/projected/a316cc87-51ce-49fd-b81c-fab7d253dea6-kube-api-access-f8fjk\") pod \"dns-operator-744455d44c-6whzf\" (UID: \"a316cc87-51ce-49fd-b81c-fab7d253dea6\") " pod="openshift-dns-operator/dns-operator-744455d44c-6whzf" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.387353 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bd18b7ad-276e-4008-85c7-e4680c574d27-serving-cert\") pod \"console-operator-58897d9998-4mvfm\" (UID: \"bd18b7ad-276e-4008-85c7-e4680c574d27\") " pod="openshift-console-operator/console-operator-58897d9998-4mvfm" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.387374 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7ba1d16e-01d0-42f5-b3ef-9196a8a11197-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-pz6k6\" (UID: \"7ba1d16e-01d0-42f5-b3ef-9196a8a11197\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pz6k6" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.387392 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bv25s\" (UniqueName: \"kubernetes.io/projected/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-kube-api-access-bv25s\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.387412 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/336a185f-a030-45f8-93f9-be33304506e1-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-qxj2h\" (UID: \"336a185f-a030-45f8-93f9-be33304506e1\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qxj2h" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.387433 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/336a185f-a030-45f8-93f9-be33304506e1-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-qxj2h\" (UID: \"336a185f-a030-45f8-93f9-be33304506e1\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qxj2h" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.387451 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/deeb21d3-29ae-4db0-a982-461ee3d6cd4b-serving-cert\") pod 
\"etcd-operator-b45778765-qxjql\" (UID: \"deeb21d3-29ae-4db0-a982-461ee3d6cd4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qxjql" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.387471 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/deeb21d3-29ae-4db0-a982-461ee3d6cd4b-etcd-client\") pod \"etcd-operator-b45778765-qxjql\" (UID: \"deeb21d3-29ae-4db0-a982-461ee3d6cd4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qxjql" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.387489 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-audit-dir\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.387511 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2l4h\" (UniqueName: \"kubernetes.io/projected/7ba1d16e-01d0-42f5-b3ef-9196a8a11197-kube-api-access-g2l4h\") pod \"openshift-controller-manager-operator-756b6f6bc6-pz6k6\" (UID: \"7ba1d16e-01d0-42f5-b3ef-9196a8a11197\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pz6k6" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.387530 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-image-import-ca\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.387552 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a92e7d6-836a-4227-9ee8-3db4353fdfa1-config\") pod \"openshift-apiserver-operator-796bbdcf4f-zfs47\" (UID: \"1a92e7d6-836a-4227-9ee8-3db4353fdfa1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zfs47" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.387570 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bszvw\" (UniqueName: \"kubernetes.io/projected/3722b0ae-dab1-4301-b98e-de27b27ab754-kube-api-access-bszvw\") pod \"collect-profiles-29484855-52bdf\" (UID: \"3722b0ae-dab1-4301-b98e-de27b27ab754\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-52bdf" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.387589 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/fcb187b2-3022-41a1-a9da-0cb254b18c16-profile-collector-cert\") pod \"olm-operator-6b444d44fb-zx2x2\" (UID: \"fcb187b2-3022-41a1-a9da-0cb254b18c16\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zx2x2" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.387609 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8jz4s\" (UniqueName: \"kubernetes.io/projected/6f9164f3-da46-447e-94fd-ffda5fb2f565-kube-api-access-8jz4s\") pod \"machine-approver-56656f9798-x5qwp\" (UID: \"6f9164f3-da46-447e-94fd-ffda5fb2f565\") " 
pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x5qwp" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.387658 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-trusted-ca-bundle\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.388373 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/78f96e55-8d97-4d74-8a95-44356bb1c0b1-available-featuregates\") pod \"openshift-config-operator-7777fb866f-974mb\" (UID: \"78f96e55-8d97-4d74-8a95-44356bb1c0b1\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-974mb" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.389128 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-config\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.389292 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bd18b7ad-276e-4008-85c7-e4680c574d27-trusted-ca\") pod \"console-operator-58897d9998-4mvfm\" (UID: \"bd18b7ad-276e-4008-85c7-e4680c574d27\") " pod="openshift-console-operator/console-operator-58897d9998-4mvfm" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.389835 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bd18b7ad-276e-4008-85c7-e4680c574d27-config\") pod \"console-operator-58897d9998-4mvfm\" (UID: \"bd18b7ad-276e-4008-85c7-e4680c574d27\") " pod="openshift-console-operator/console-operator-58897d9998-4mvfm" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.389881 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-l9pc7"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.389947 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-jwzjg"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.389962 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6bb9s"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.390426 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/deeb21d3-29ae-4db0-a982-461ee3d6cd4b-etcd-ca\") pod \"etcd-operator-b45778765-qxjql\" (UID: \"deeb21d3-29ae-4db0-a982-461ee3d6cd4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qxjql" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.390939 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f2c0d1d9-76f0-45bb-8a15-88a0876d5b76-config\") pod \"machine-api-operator-5694c8668f-bbmbn\" (UID: \"f2c0d1d9-76f0-45bb-8a15-88a0876d5b76\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bbmbn" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.391166 4948 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-audit-dir\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.391507 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/f2c0d1d9-76f0-45bb-8a15-88a0876d5b76-images\") pod \"machine-api-operator-5694c8668f-bbmbn\" (UID: \"f2c0d1d9-76f0-45bb-8a15-88a0876d5b76\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bbmbn" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.394525 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-audit\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.395078 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1a92e7d6-836a-4227-9ee8-3db4353fdfa1-config\") pod \"openshift-apiserver-operator-796bbdcf4f-zfs47\" (UID: \"1a92e7d6-836a-4227-9ee8-3db4353fdfa1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zfs47" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.395807 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/deeb21d3-29ae-4db0-a982-461ee3d6cd4b-etcd-service-ca\") pod \"etcd-operator-b45778765-qxjql\" (UID: \"deeb21d3-29ae-4db0-a982-461ee3d6cd4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qxjql" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.395901 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-s4jms"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.395925 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-image-import-ca\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.396458 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7ba1d16e-01d0-42f5-b3ef-9196a8a11197-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-pz6k6\" (UID: \"7ba1d16e-01d0-42f5-b3ef-9196a8a11197\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pz6k6" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.399091 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zx2x2"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.399152 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-t6lsv"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.401222 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-qjjhb"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.402895 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.403051 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-encryption-config\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.403157 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/deeb21d3-29ae-4db0-a982-461ee3d6cd4b-etcd-client\") pod \"etcd-operator-b45778765-qxjql\" (UID: \"deeb21d3-29ae-4db0-a982-461ee3d6cd4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qxjql" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.403188 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-serving-cert\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.403497 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7ba1d16e-01d0-42f5-b3ef-9196a8a11197-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-pz6k6\" (UID: \"7ba1d16e-01d0-42f5-b3ef-9196a8a11197\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pz6k6" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.403678 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1a92e7d6-836a-4227-9ee8-3db4353fdfa1-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-zfs47\" (UID: \"1a92e7d6-836a-4227-9ee8-3db4353fdfa1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zfs47" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.403859 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/78f96e55-8d97-4d74-8a95-44356bb1c0b1-serving-cert\") pod \"openshift-config-operator-7777fb866f-974mb\" (UID: \"78f96e55-8d97-4d74-8a95-44356bb1c0b1\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-974mb" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.404475 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bd18b7ad-276e-4008-85c7-e4680c574d27-serving-cert\") pod \"console-operator-58897d9998-4mvfm\" (UID: \"bd18b7ad-276e-4008-85c7-e4680c574d27\") " pod="openshift-console-operator/console-operator-58897d9998-4mvfm" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.404518 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-g5zjf"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.404663 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-etcd-client\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.404827 
4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.406359 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-ln6zk"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.407708 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-lt5mv"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.415147 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.415180 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/f2c0d1d9-76f0-45bb-8a15-88a0876d5b76-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-bbmbn\" (UID: \"f2c0d1d9-76f0-45bb-8a15-88a0876d5b76\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bbmbn" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.415800 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/deeb21d3-29ae-4db0-a982-461ee3d6cd4b-serving-cert\") pod \"etcd-operator-b45778765-qxjql\" (UID: \"deeb21d3-29ae-4db0-a982-461ee3d6cd4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qxjql" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.415833 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.419074 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zq2mw"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.422315 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xf6qs"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.424630 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6j7zl"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.426143 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-9svkn"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.427113 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-9svkn" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.427863 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-8q98t"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.429293 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-8q98t" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.429556 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.429676 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkbsc"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.430877 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h9sz7"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.432246 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7cjtc"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.433303 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f4955"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.434367 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-5mc8q"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.435419 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-7kdkq"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.436881 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-9svkn"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.438480 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-8q98t"] Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.450386 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.469570 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.488434 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bszvw\" (UniqueName: \"kubernetes.io/projected/3722b0ae-dab1-4301-b98e-de27b27ab754-kube-api-access-bszvw\") pod \"collect-profiles-29484855-52bdf\" (UID: \"3722b0ae-dab1-4301-b98e-de27b27ab754\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-52bdf" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.488495 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/fcb187b2-3022-41a1-a9da-0cb254b18c16-profile-collector-cert\") pod \"olm-operator-6b444d44fb-zx2x2\" (UID: \"fcb187b2-3022-41a1-a9da-0cb254b18c16\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zx2x2" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.488537 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8jz4s\" (UniqueName: \"kubernetes.io/projected/6f9164f3-da46-447e-94fd-ffda5fb2f565-kube-api-access-8jz4s\") pod \"machine-approver-56656f9798-x5qwp\" (UID: \"6f9164f3-da46-447e-94fd-ffda5fb2f565\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x5qwp" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.488572 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config\" (UniqueName: \"kubernetes.io/configmap/6f9164f3-da46-447e-94fd-ffda5fb2f565-config\") pod \"machine-approver-56656f9798-x5qwp\" (UID: \"6f9164f3-da46-447e-94fd-ffda5fb2f565\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x5qwp" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.488600 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3722b0ae-dab1-4301-b98e-de27b27ab754-secret-volume\") pod \"collect-profiles-29484855-52bdf\" (UID: \"3722b0ae-dab1-4301-b98e-de27b27ab754\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-52bdf" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.488632 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5xgps\" (UniqueName: \"kubernetes.io/projected/1c262802-72e7-4bc1-92bd-bf8b5551cade-kube-api-access-5xgps\") pod \"machine-config-controller-84d6567774-ps89b\" (UID: \"1c262802-72e7-4bc1-92bd-bf8b5551cade\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ps89b" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.488675 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.488711 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.488744 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ce5de36e-7ed5-42bb-bf9d-28bf32c76edf-auth-proxy-config\") pod \"machine-config-operator-74547568cd-l9pc7\" (UID: \"ce5de36e-7ed5-42bb-bf9d-28bf32c76edf\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l9pc7" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.488812 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ca154693-ba02-4d7b-a4b6-45fee35a0280-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-cjtzs\" (UID: \"ca154693-ba02-4d7b-a4b6-45fee35a0280\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-cjtzs" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.488844 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/336a185f-a030-45f8-93f9-be33304506e1-config\") pod \"kube-apiserver-operator-766d6c64bb-qxj2h\" (UID: \"336a185f-a030-45f8-93f9-be33304506e1\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qxj2h" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.488919 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a5509fc-2393-4932-be1c-6b9379065100-config\") pod 
\"kube-controller-manager-operator-78b949d7b-sv2zr\" (UID: \"0a5509fc-2393-4932-be1c-6b9379065100\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sv2zr" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.488981 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.489042 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.489075 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1c262802-72e7-4bc1-92bd-bf8b5551cade-proxy-tls\") pod \"machine-config-controller-84d6567774-ps89b\" (UID: \"1c262802-72e7-4bc1-92bd-bf8b5551cade\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ps89b" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.489106 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/33ae6fda-0f24-436c-b813-e01bb556110e-webhook-cert\") pod \"packageserver-d55dfcdfc-6j7zl\" (UID: \"33ae6fda-0f24-436c-b813-e01bb556110e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6j7zl" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.489138 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1cb99e0f-ec79-4e50-9284-f161cfe70a93-etcd-client\") pod \"apiserver-7bbb656c7d-v7qjh\" (UID: \"1cb99e0f-ec79-4e50-9284-f161cfe70a93\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.489179 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.489260 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mbxdz\" (UniqueName: \"kubernetes.io/projected/1cb99e0f-ec79-4e50-9284-f161cfe70a93-kube-api-access-mbxdz\") pod \"apiserver-7bbb656c7d-v7qjh\" (UID: \"1cb99e0f-ec79-4e50-9284-f161cfe70a93\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.489299 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0a5509fc-2393-4932-be1c-6b9379065100-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-sv2zr\" (UID: \"0a5509fc-2393-4932-be1c-6b9379065100\") " 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sv2zr" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.489332 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3722b0ae-dab1-4301-b98e-de27b27ab754-config-volume\") pod \"collect-profiles-29484855-52bdf\" (UID: \"3722b0ae-dab1-4301-b98e-de27b27ab754\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-52bdf" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.489369 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/d6d99019-a94b-4736-b5f4-496d973e0bc8-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-9f5vd\" (UID: \"d6d99019-a94b-4736-b5f4-496d973e0bc8\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9f5vd" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.489420 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.489463 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hx5nz\" (UniqueName: \"kubernetes.io/projected/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-kube-api-access-hx5nz\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.489494 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2314b196-d44a-487b-b9db-e0396b172957-config\") pod \"controller-manager-879f6c89f-xf6qs\" (UID: \"2314b196-d44a-487b-b9db-e0396b172957\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xf6qs" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.489528 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c1d05e6-fc11-4fb5-bf46-c346ded55ea8-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-jkbsc\" (UID: \"4c1d05e6-fc11-4fb5-bf46-c346ded55ea8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkbsc" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.489552 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6f9164f3-da46-447e-94fd-ffda5fb2f565-config\") pod \"machine-approver-56656f9798-x5qwp\" (UID: \"6f9164f3-da46-447e-94fd-ffda5fb2f565\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x5qwp" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.489569 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1cb99e0f-ec79-4e50-9284-f161cfe70a93-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-v7qjh\" (UID: \"1cb99e0f-ec79-4e50-9284-f161cfe70a93\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" 
Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.489603 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ld58\" (UniqueName: \"kubernetes.io/projected/ce5de36e-7ed5-42bb-bf9d-28bf32c76edf-kube-api-access-8ld58\") pod \"machine-config-operator-74547568cd-l9pc7\" (UID: \"ce5de36e-7ed5-42bb-bf9d-28bf32c76edf\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l9pc7" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.489639 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p9b7p\" (UniqueName: \"kubernetes.io/projected/4c1d05e6-fc11-4fb5-bf46-c346ded55ea8-kube-api-access-p9b7p\") pod \"kube-storage-version-migrator-operator-b67b599dd-jkbsc\" (UID: \"4c1d05e6-fc11-4fb5-bf46-c346ded55ea8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkbsc" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.489670 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x4fjj\" (UniqueName: \"kubernetes.io/projected/d6d99019-a94b-4736-b5f4-496d973e0bc8-kube-api-access-x4fjj\") pod \"control-plane-machine-set-operator-78cbb6b69f-9f5vd\" (UID: \"d6d99019-a94b-4736-b5f4-496d973e0bc8\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9f5vd" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.489706 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a316cc87-51ce-49fd-b81c-fab7d253dea6-metrics-tls\") pod \"dns-operator-744455d44c-6whzf\" (UID: \"a316cc87-51ce-49fd-b81c-fab7d253dea6\") " pod="openshift-dns-operator/dns-operator-744455d44c-6whzf" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.489736 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-27wzl\" (UniqueName: \"kubernetes.io/projected/2314b196-d44a-487b-b9db-e0396b172957-kube-api-access-27wzl\") pod \"controller-manager-879f6c89f-xf6qs\" (UID: \"2314b196-d44a-487b-b9db-e0396b172957\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xf6qs" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.489761 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1cb99e0f-ec79-4e50-9284-f161cfe70a93-encryption-config\") pod \"apiserver-7bbb656c7d-v7qjh\" (UID: \"1cb99e0f-ec79-4e50-9284-f161cfe70a93\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.489787 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2314b196-d44a-487b-b9db-e0396b172957-client-ca\") pod \"controller-manager-879f6c89f-xf6qs\" (UID: \"2314b196-d44a-487b-b9db-e0396b172957\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xf6qs" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.489815 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2314b196-d44a-487b-b9db-e0396b172957-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-xf6qs\" (UID: \"2314b196-d44a-487b-b9db-e0396b172957\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xf6qs" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.489845 
4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxvh8\" (UniqueName: \"kubernetes.io/projected/fcb187b2-3022-41a1-a9da-0cb254b18c16-kube-api-access-qxvh8\") pod \"olm-operator-6b444d44fb-zx2x2\" (UID: \"fcb187b2-3022-41a1-a9da-0cb254b18c16\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zx2x2" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.489916 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/33ae6fda-0f24-436c-b813-e01bb556110e-tmpfs\") pod \"packageserver-d55dfcdfc-6j7zl\" (UID: \"33ae6fda-0f24-436c-b813-e01bb556110e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6j7zl" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.489952 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1cb99e0f-ec79-4e50-9284-f161cfe70a93-serving-cert\") pod \"apiserver-7bbb656c7d-v7qjh\" (UID: \"1cb99e0f-ec79-4e50-9284-f161cfe70a93\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.489928 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.489989 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1c262802-72e7-4bc1-92bd-bf8b5551cade-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-ps89b\" (UID: \"1c262802-72e7-4bc1-92bd-bf8b5551cade\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ps89b" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.490096 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ce5de36e-7ed5-42bb-bf9d-28bf32c76edf-images\") pod \"machine-config-operator-74547568cd-l9pc7\" (UID: \"ce5de36e-7ed5-42bb-bf9d-28bf32c76edf\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l9pc7" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.490148 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/6f9164f3-da46-447e-94fd-ffda5fb2f565-machine-approver-tls\") pod \"machine-approver-56656f9798-x5qwp\" (UID: \"6f9164f3-da46-447e-94fd-ffda5fb2f565\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x5qwp" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.490159 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/336a185f-a030-45f8-93f9-be33304506e1-config\") pod \"kube-apiserver-operator-766d6c64bb-qxj2h\" (UID: \"336a185f-a030-45f8-93f9-be33304506e1\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qxj2h" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.490217 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: 
\"kubernetes.io/secret/33ae6fda-0f24-436c-b813-e01bb556110e-apiservice-cert\") pod \"packageserver-d55dfcdfc-6j7zl\" (UID: \"33ae6fda-0f24-436c-b813-e01bb556110e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6j7zl" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.490252 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pnzz5\" (UniqueName: \"kubernetes.io/projected/6187e924-4baa-4a90-a669-891278f2e23d-kube-api-access-pnzz5\") pod \"migrator-59844c95c7-ln6zk\" (UID: \"6187e924-4baa-4a90-a669-891278f2e23d\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ln6zk" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.490281 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.490310 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g2wl6\" (UniqueName: \"kubernetes.io/projected/ca154693-ba02-4d7b-a4b6-45fee35a0280-kube-api-access-g2wl6\") pod \"multus-admission-controller-857f4d67dd-cjtzs\" (UID: \"ca154693-ba02-4d7b-a4b6-45fee35a0280\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-cjtzs" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.490343 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1cb99e0f-ec79-4e50-9284-f161cfe70a93-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-v7qjh\" (UID: \"1cb99e0f-ec79-4e50-9284-f161cfe70a93\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.490372 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.490396 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1cb99e0f-ec79-4e50-9284-f161cfe70a93-audit-dir\") pod \"apiserver-7bbb656c7d-v7qjh\" (UID: \"1cb99e0f-ec79-4e50-9284-f161cfe70a93\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.490421 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-audit-policies\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.490451 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/e1523dba-d641-467b-96eb-521434ddfa75-profile-collector-cert\") pod \"catalog-operator-68c6474976-5jkws\" (UID: 
\"e1523dba-d641-467b-96eb-521434ddfa75\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5jkws" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.490479 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/68335fcc-2687-4bf0-8a43-f4e3d3c4702e-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-h9sz7\" (UID: \"68335fcc-2687-4bf0-8a43-f4e3d3c4702e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h9sz7" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.490503 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/fcb187b2-3022-41a1-a9da-0cb254b18c16-srv-cert\") pod \"olm-operator-6b444d44fb-zx2x2\" (UID: \"fcb187b2-3022-41a1-a9da-0cb254b18c16\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zx2x2" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.490528 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/ce5de36e-7ed5-42bb-bf9d-28bf32c76edf-proxy-tls\") pod \"machine-config-operator-74547568cd-l9pc7\" (UID: \"ce5de36e-7ed5-42bb-bf9d-28bf32c76edf\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l9pc7" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.490546 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxt4l\" (UniqueName: \"kubernetes.io/projected/33ae6fda-0f24-436c-b813-e01bb556110e-kube-api-access-gxt4l\") pod \"packageserver-d55dfcdfc-6j7zl\" (UID: \"33ae6fda-0f24-436c-b813-e01bb556110e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6j7zl" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.490579 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.490599 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.490764 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mztqh\" (UniqueName: \"kubernetes.io/projected/e1523dba-d641-467b-96eb-521434ddfa75-kube-api-access-mztqh\") pod \"catalog-operator-68c6474976-5jkws\" (UID: \"e1523dba-d641-467b-96eb-521434ddfa75\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5jkws" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.490783 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/ce5de36e-7ed5-42bb-bf9d-28bf32c76edf-auth-proxy-config\") pod \"machine-config-operator-74547568cd-l9pc7\" (UID: 
\"ce5de36e-7ed5-42bb-bf9d-28bf32c76edf\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l9pc7" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.490792 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.490850 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txx8z\" (UniqueName: \"kubernetes.io/projected/a35e6789-77cc-4dd7-acc0-e4ca4ee5cdad-kube-api-access-txx8z\") pod \"downloads-7954f5f757-s4jms\" (UID: \"a35e6789-77cc-4dd7-acc0-e4ca4ee5cdad\") " pod="openshift-console/downloads-7954f5f757-s4jms" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.490875 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0a5509fc-2393-4932-be1c-6b9379065100-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-sv2zr\" (UID: \"0a5509fc-2393-4932-be1c-6b9379065100\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sv2zr" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.490894 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6f9164f3-da46-447e-94fd-ffda5fb2f565-auth-proxy-config\") pod \"machine-approver-56656f9798-x5qwp\" (UID: \"6f9164f3-da46-447e-94fd-ffda5fb2f565\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x5qwp" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.490925 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2314b196-d44a-487b-b9db-e0396b172957-serving-cert\") pod \"controller-manager-879f6c89f-xf6qs\" (UID: \"2314b196-d44a-487b-b9db-e0396b172957\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xf6qs" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.490964 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4c1d05e6-fc11-4fb5-bf46-c346ded55ea8-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-jkbsc\" (UID: \"4c1d05e6-fc11-4fb5-bf46-c346ded55ea8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkbsc" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.490982 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/66ee3af7-a820-42da-8190-e84882cee1d4-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-fkltl\" (UID: \"66ee3af7-a820-42da-8190-e84882cee1d4\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fkltl" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.491041 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9f86h\" (UniqueName: \"kubernetes.io/projected/66ee3af7-a820-42da-8190-e84882cee1d4-kube-api-access-9f86h\") pod 
\"cluster-samples-operator-665b6dd947-fkltl\" (UID: \"66ee3af7-a820-42da-8190-e84882cee1d4\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fkltl" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.491059 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/1cb99e0f-ec79-4e50-9284-f161cfe70a93-audit-policies\") pod \"apiserver-7bbb656c7d-v7qjh\" (UID: \"1cb99e0f-ec79-4e50-9284-f161cfe70a93\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.491080 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/e1523dba-d641-467b-96eb-521434ddfa75-srv-cert\") pod \"catalog-operator-68c6474976-5jkws\" (UID: \"e1523dba-d641-467b-96eb-521434ddfa75\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5jkws" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.491101 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-f8fjk\" (UniqueName: \"kubernetes.io/projected/a316cc87-51ce-49fd-b81c-fab7d253dea6-kube-api-access-f8fjk\") pod \"dns-operator-744455d44c-6whzf\" (UID: \"a316cc87-51ce-49fd-b81c-fab7d253dea6\") " pod="openshift-dns-operator/dns-operator-744455d44c-6whzf" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.491118 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cnlm4\" (UniqueName: \"kubernetes.io/projected/68335fcc-2687-4bf0-8a43-f4e3d3c4702e-kube-api-access-cnlm4\") pod \"package-server-manager-789f6589d5-h9sz7\" (UID: \"68335fcc-2687-4bf0-8a43-f4e3d3c4702e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h9sz7" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.491153 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/336a185f-a030-45f8-93f9-be33304506e1-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-qxj2h\" (UID: \"336a185f-a030-45f8-93f9-be33304506e1\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qxj2h" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.491176 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/336a185f-a030-45f8-93f9-be33304506e1-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-qxj2h\" (UID: \"336a185f-a030-45f8-93f9-be33304506e1\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qxj2h" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.491345 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0a5509fc-2393-4932-be1c-6b9379065100-config\") pod \"kube-controller-manager-operator-78b949d7b-sv2zr\" (UID: \"0a5509fc-2393-4932-be1c-6b9379065100\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sv2zr" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.491199 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-audit-dir\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.491833 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-audit-dir\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.491913 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.491919 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2314b196-d44a-487b-b9db-e0396b172957-config\") pod \"controller-manager-879f6c89f-xf6qs\" (UID: \"2314b196-d44a-487b-b9db-e0396b172957\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xf6qs" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.492156 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2314b196-d44a-487b-b9db-e0396b172957-client-ca\") pod \"controller-manager-879f6c89f-xf6qs\" (UID: \"2314b196-d44a-487b-b9db-e0396b172957\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xf6qs" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.492539 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/ce5de36e-7ed5-42bb-bf9d-28bf32c76edf-images\") pod \"machine-config-operator-74547568cd-l9pc7\" (UID: \"ce5de36e-7ed5-42bb-bf9d-28bf32c76edf\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l9pc7" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.492617 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3722b0ae-dab1-4301-b98e-de27b27ab754-config-volume\") pod \"collect-profiles-29484855-52bdf\" (UID: \"3722b0ae-dab1-4301-b98e-de27b27ab754\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-52bdf" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.492889 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/33ae6fda-0f24-436c-b813-e01bb556110e-tmpfs\") pod \"packageserver-d55dfcdfc-6j7zl\" (UID: \"33ae6fda-0f24-436c-b813-e01bb556110e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6j7zl" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.493236 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2314b196-d44a-487b-b9db-e0396b172957-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-xf6qs\" (UID: \"2314b196-d44a-487b-b9db-e0396b172957\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xf6qs" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.493740 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.493910 4948 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.494453 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/1cb99e0f-ec79-4e50-9284-f161cfe70a93-audit-policies\") pod \"apiserver-7bbb656c7d-v7qjh\" (UID: \"1cb99e0f-ec79-4e50-9284-f161cfe70a93\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.494767 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-audit-policies\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.495291 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/1c262802-72e7-4bc1-92bd-bf8b5551cade-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-ps89b\" (UID: \"1c262802-72e7-4bc1-92bd-bf8b5551cade\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ps89b" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.495366 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/6f9164f3-da46-447e-94fd-ffda5fb2f565-auth-proxy-config\") pod \"machine-approver-56656f9798-x5qwp\" (UID: \"6f9164f3-da46-447e-94fd-ffda5fb2f565\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x5qwp" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.496048 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1cb99e0f-ec79-4e50-9284-f161cfe70a93-audit-dir\") pod \"apiserver-7bbb656c7d-v7qjh\" (UID: \"1cb99e0f-ec79-4e50-9284-f161cfe70a93\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.495316 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ca154693-ba02-4d7b-a4b6-45fee35a0280-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-cjtzs\" (UID: \"ca154693-ba02-4d7b-a4b6-45fee35a0280\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-cjtzs" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.496501 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3722b0ae-dab1-4301-b98e-de27b27ab754-secret-volume\") pod \"collect-profiles-29484855-52bdf\" (UID: \"3722b0ae-dab1-4301-b98e-de27b27ab754\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-52bdf" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.496759 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/fcb187b2-3022-41a1-a9da-0cb254b18c16-profile-collector-cert\") pod \"olm-operator-6b444d44fb-zx2x2\" (UID: 
\"fcb187b2-3022-41a1-a9da-0cb254b18c16\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zx2x2" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.497188 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/6f9164f3-da46-447e-94fd-ffda5fb2f565-machine-approver-tls\") pod \"machine-approver-56656f9798-x5qwp\" (UID: \"6f9164f3-da46-447e-94fd-ffda5fb2f565\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x5qwp" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.497348 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.497510 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/1c262802-72e7-4bc1-92bd-bf8b5551cade-proxy-tls\") pod \"machine-config-controller-84d6567774-ps89b\" (UID: \"1c262802-72e7-4bc1-92bd-bf8b5551cade\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ps89b" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.497564 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.497847 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.498263 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/336a185f-a030-45f8-93f9-be33304506e1-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-qxj2h\" (UID: \"336a185f-a030-45f8-93f9-be33304506e1\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qxj2h" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.499420 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.499479 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/33ae6fda-0f24-436c-b813-e01bb556110e-webhook-cert\") pod \"packageserver-d55dfcdfc-6j7zl\" (UID: \"33ae6fda-0f24-436c-b813-e01bb556110e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6j7zl" Jan 22 14:21:17 crc 
kubenswrapper[4948]: I0122 14:21:17.499534 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2314b196-d44a-487b-b9db-e0396b172957-serving-cert\") pod \"controller-manager-879f6c89f-xf6qs\" (UID: \"2314b196-d44a-487b-b9db-e0396b172957\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xf6qs" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.499540 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/66ee3af7-a820-42da-8190-e84882cee1d4-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-fkltl\" (UID: \"66ee3af7-a820-42da-8190-e84882cee1d4\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fkltl" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.499851 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.500074 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/33ae6fda-0f24-436c-b813-e01bb556110e-apiservice-cert\") pod \"packageserver-d55dfcdfc-6j7zl\" (UID: \"33ae6fda-0f24-436c-b813-e01bb556110e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6j7zl" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.499949 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1cb99e0f-ec79-4e50-9284-f161cfe70a93-encryption-config\") pod \"apiserver-7bbb656c7d-v7qjh\" (UID: \"1cb99e0f-ec79-4e50-9284-f161cfe70a93\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.500248 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0a5509fc-2393-4932-be1c-6b9379065100-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-sv2zr\" (UID: \"0a5509fc-2393-4932-be1c-6b9379065100\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sv2zr" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.500283 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1cb99e0f-ec79-4e50-9284-f161cfe70a93-etcd-client\") pod \"apiserver-7bbb656c7d-v7qjh\" (UID: \"1cb99e0f-ec79-4e50-9284-f161cfe70a93\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.500388 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/d6d99019-a94b-4736-b5f4-496d973e0bc8-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-9f5vd\" (UID: \"d6d99019-a94b-4736-b5f4-496d973e0bc8\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9f5vd" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.500581 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" 
(UniqueName: \"kubernetes.io/secret/ce5de36e-7ed5-42bb-bf9d-28bf32c76edf-proxy-tls\") pod \"machine-config-operator-74547568cd-l9pc7\" (UID: \"ce5de36e-7ed5-42bb-bf9d-28bf32c76edf\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l9pc7" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.500898 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.501413 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a316cc87-51ce-49fd-b81c-fab7d253dea6-metrics-tls\") pod \"dns-operator-744455d44c-6whzf\" (UID: \"a316cc87-51ce-49fd-b81c-fab7d253dea6\") " pod="openshift-dns-operator/dns-operator-744455d44c-6whzf" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.501546 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1cb99e0f-ec79-4e50-9284-f161cfe70a93-serving-cert\") pod \"apiserver-7bbb656c7d-v7qjh\" (UID: \"1cb99e0f-ec79-4e50-9284-f161cfe70a93\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.501596 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.501793 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/fcb187b2-3022-41a1-a9da-0cb254b18c16-srv-cert\") pod \"olm-operator-6b444d44fb-zx2x2\" (UID: \"fcb187b2-3022-41a1-a9da-0cb254b18c16\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zx2x2" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.502545 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.502740 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/e1523dba-d641-467b-96eb-521434ddfa75-srv-cert\") pod \"catalog-operator-68c6474976-5jkws\" (UID: \"e1523dba-d641-467b-96eb-521434ddfa75\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5jkws" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.510196 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.512138 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: 
\"kubernetes.io/configmap/1cb99e0f-ec79-4e50-9284-f161cfe70a93-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-v7qjh\" (UID: \"1cb99e0f-ec79-4e50-9284-f161cfe70a93\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.515766 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/e1523dba-d641-467b-96eb-521434ddfa75-profile-collector-cert\") pod \"catalog-operator-68c6474976-5jkws\" (UID: \"e1523dba-d641-467b-96eb-521434ddfa75\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5jkws" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.530002 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.536195 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1cb99e0f-ec79-4e50-9284-f161cfe70a93-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-v7qjh\" (UID: \"1cb99e0f-ec79-4e50-9284-f161cfe70a93\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.550301 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.590783 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.592480 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txx8z\" (UniqueName: \"kubernetes.io/projected/a35e6789-77cc-4dd7-acc0-e4ca4ee5cdad-kube-api-access-txx8z\") pod \"downloads-7954f5f757-s4jms\" (UID: \"a35e6789-77cc-4dd7-acc0-e4ca4ee5cdad\") " pod="openshift-console/downloads-7954f5f757-s4jms" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.592544 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4c1d05e6-fc11-4fb5-bf46-c346ded55ea8-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-jkbsc\" (UID: \"4c1d05e6-fc11-4fb5-bf46-c346ded55ea8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkbsc" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.592580 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cnlm4\" (UniqueName: \"kubernetes.io/projected/68335fcc-2687-4bf0-8a43-f4e3d3c4702e-kube-api-access-cnlm4\") pod \"package-server-manager-789f6589d5-h9sz7\" (UID: \"68335fcc-2687-4bf0-8a43-f4e3d3c4702e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h9sz7" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.592724 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c1d05e6-fc11-4fb5-bf46-c346ded55ea8-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-jkbsc\" (UID: \"4c1d05e6-fc11-4fb5-bf46-c346ded55ea8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkbsc" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.592764 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-p9b7p\" (UniqueName: \"kubernetes.io/projected/4c1d05e6-fc11-4fb5-bf46-c346ded55ea8-kube-api-access-p9b7p\") pod \"kube-storage-version-migrator-operator-b67b599dd-jkbsc\" (UID: \"4c1d05e6-fc11-4fb5-bf46-c346ded55ea8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkbsc" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.592843 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pnzz5\" (UniqueName: \"kubernetes.io/projected/6187e924-4baa-4a90-a669-891278f2e23d-kube-api-access-pnzz5\") pod \"migrator-59844c95c7-ln6zk\" (UID: \"6187e924-4baa-4a90-a669-891278f2e23d\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ln6zk" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.592885 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/68335fcc-2687-4bf0-8a43-f4e3d3c4702e-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-h9sz7\" (UID: \"68335fcc-2687-4bf0-8a43-f4e3d3c4702e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h9sz7" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.609538 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.630074 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.649416 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.669789 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.690179 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.710142 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.735063 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.749207 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.770409 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.789666 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.810076 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.816601 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/4c1d05e6-fc11-4fb5-bf46-c346ded55ea8-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-jkbsc\" (UID: \"4c1d05e6-fc11-4fb5-bf46-c346ded55ea8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkbsc" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.830126 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.834276 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4c1d05e6-fc11-4fb5-bf46-c346ded55ea8-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-jkbsc\" (UID: \"4c1d05e6-fc11-4fb5-bf46-c346ded55ea8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkbsc" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.850284 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.870483 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.900405 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.910430 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.930248 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.951186 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.970097 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.976583 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/68335fcc-2687-4bf0-8a43-f4e3d3c4702e-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-h9sz7\" (UID: \"68335fcc-2687-4bf0-8a43-f4e3d3c4702e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h9sz7" Jan 22 14:21:17 crc kubenswrapper[4948]: I0122 14:21:17.990223 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.010794 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.030500 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.058460 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 22 14:21:18 crc 
kubenswrapper[4948]: I0122 14:21:18.070297 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.091715 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.110869 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.130688 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.150082 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.171135 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.189919 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.210510 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.230558 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.249647 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.270444 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.289926 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.310844 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.330333 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.350902 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.368469 4948 request.go:700] Waited for 1.012158549s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-route-controller-manager/secrets?fieldSelector=metadata.name%3Dserving-cert&limit=500&resourceVersion=0 Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.370527 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.390968 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.410322 4948 reflector.go:368] 
Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.436930 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.450446 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.470160 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.489817 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.509974 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.530297 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.550704 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.575234 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.590623 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.610039 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.629954 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.649546 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.670160 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.689802 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.716086 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.729943 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.749988 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.770141 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 
14:21:18.795206 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.810751 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.830627 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.850173 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.869591 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.890192 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.909851 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.929771 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.950654 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.970193 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 22 14:21:18 crc kubenswrapper[4948]: I0122 14:21:18.990411 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.009202 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.029969 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.050062 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.069951 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.089725 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.124201 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mwn2r\" (UniqueName: \"kubernetes.io/projected/bd18b7ad-276e-4008-85c7-e4680c574d27-kube-api-access-mwn2r\") pod \"console-operator-58897d9998-4mvfm\" (UID: \"bd18b7ad-276e-4008-85c7-e4680c574d27\") " pod="openshift-console-operator/console-operator-58897d9998-4mvfm" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.146921 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8g22j\" (UniqueName: \"kubernetes.io/projected/1a92e7d6-836a-4227-9ee8-3db4353fdfa1-kube-api-access-8g22j\") pod 
\"openshift-apiserver-operator-796bbdcf4f-zfs47\" (UID: \"1a92e7d6-836a-4227-9ee8-3db4353fdfa1\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zfs47" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.164920 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fznpl\" (UniqueName: \"kubernetes.io/projected/78f96e55-8d97-4d74-8a95-44356bb1c0b1-kube-api-access-fznpl\") pod \"openshift-config-operator-7777fb866f-974mb\" (UID: \"78f96e55-8d97-4d74-8a95-44356bb1c0b1\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-974mb" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.192386 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kclqr\" (UniqueName: \"kubernetes.io/projected/f2c0d1d9-76f0-45bb-8a15-88a0876d5b76-kube-api-access-kclqr\") pod \"machine-api-operator-5694c8668f-bbmbn\" (UID: \"f2c0d1d9-76f0-45bb-8a15-88a0876d5b76\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bbmbn" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.204266 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dpx2\" (UniqueName: \"kubernetes.io/projected/deeb21d3-29ae-4db0-a982-461ee3d6cd4b-kube-api-access-4dpx2\") pod \"etcd-operator-b45778765-qxjql\" (UID: \"deeb21d3-29ae-4db0-a982-461ee3d6cd4b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-qxjql" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.235253 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bv25s\" (UniqueName: \"kubernetes.io/projected/b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0-kube-api-access-bv25s\") pod \"apiserver-76f77b778f-lkpqv\" (UID: \"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0\") " pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.245115 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2l4h\" (UniqueName: \"kubernetes.io/projected/7ba1d16e-01d0-42f5-b3ef-9196a8a11197-kube-api-access-g2l4h\") pod \"openshift-controller-manager-operator-756b6f6bc6-pz6k6\" (UID: \"7ba1d16e-01d0-42f5-b3ef-9196a8a11197\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pz6k6" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.270617 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.290246 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.298497 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-4mvfm" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.310539 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.327935 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.329920 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.350279 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.361992 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-bbmbn" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.370669 4948 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.382677 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-qxjql" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.389100 4948 request.go:700] Waited for 1.9003874s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-operator-lifecycle-manager/serviceaccounts/collect-profiles/token Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.410404 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zfs47" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.414960 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bszvw\" (UniqueName: \"kubernetes.io/projected/3722b0ae-dab1-4301-b98e-de27b27ab754-kube-api-access-bszvw\") pod \"collect-profiles-29484855-52bdf\" (UID: \"3722b0ae-dab1-4301-b98e-de27b27ab754\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-52bdf" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.431304 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8jz4s\" (UniqueName: \"kubernetes.io/projected/6f9164f3-da46-447e-94fd-ffda5fb2f565-kube-api-access-8jz4s\") pod \"machine-approver-56656f9798-x5qwp\" (UID: \"6f9164f3-da46-447e-94fd-ffda5fb2f565\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x5qwp" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.432923 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-974mb" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.452225 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5xgps\" (UniqueName: \"kubernetes.io/projected/1c262802-72e7-4bc1-92bd-bf8b5551cade-kube-api-access-5xgps\") pod \"machine-config-controller-84d6567774-ps89b\" (UID: \"1c262802-72e7-4bc1-92bd-bf8b5551cade\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ps89b" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.457954 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pz6k6" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.466863 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ld58\" (UniqueName: \"kubernetes.io/projected/ce5de36e-7ed5-42bb-bf9d-28bf32c76edf-kube-api-access-8ld58\") pod \"machine-config-operator-74547568cd-l9pc7\" (UID: \"ce5de36e-7ed5-42bb-bf9d-28bf32c76edf\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l9pc7" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.485901 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x4fjj\" (UniqueName: \"kubernetes.io/projected/d6d99019-a94b-4736-b5f4-496d973e0bc8-kube-api-access-x4fjj\") pod \"control-plane-machine-set-operator-78cbb6b69f-9f5vd\" (UID: \"d6d99019-a94b-4736-b5f4-496d973e0bc8\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9f5vd" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.505789 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mbxdz\" (UniqueName: \"kubernetes.io/projected/1cb99e0f-ec79-4e50-9284-f161cfe70a93-kube-api-access-mbxdz\") pod \"apiserver-7bbb656c7d-v7qjh\" (UID: \"1cb99e0f-ec79-4e50-9284-f161cfe70a93\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.529067 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxvh8\" (UniqueName: \"kubernetes.io/projected/fcb187b2-3022-41a1-a9da-0cb254b18c16-kube-api-access-qxvh8\") pod \"olm-operator-6b444d44fb-zx2x2\" (UID: \"fcb187b2-3022-41a1-a9da-0cb254b18c16\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zx2x2" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.535024 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l9pc7" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.535787 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-lkpqv"] Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.540468 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-4mvfm"] Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.546575 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ps89b" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.551530 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zx2x2" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.555285 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hx5nz\" (UniqueName: \"kubernetes.io/projected/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-kube-api-access-hx5nz\") pod \"oauth-openshift-558db77b4-zq2mw\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.557837 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9f5vd" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.574527 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9f86h\" (UniqueName: \"kubernetes.io/projected/66ee3af7-a820-42da-8190-e84882cee1d4-kube-api-access-9f86h\") pod \"cluster-samples-operator-665b6dd947-fkltl\" (UID: \"66ee3af7-a820-42da-8190-e84882cee1d4\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fkltl" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.574766 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-52bdf" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.589180 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-27wzl\" (UniqueName: \"kubernetes.io/projected/2314b196-d44a-487b-b9db-e0396b172957-kube-api-access-27wzl\") pod \"controller-manager-879f6c89f-xf6qs\" (UID: \"2314b196-d44a-487b-b9db-e0396b172957\") " pod="openshift-controller-manager/controller-manager-879f6c89f-xf6qs" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.607628 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0a5509fc-2393-4932-be1c-6b9379065100-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-sv2zr\" (UID: \"0a5509fc-2393-4932-be1c-6b9379065100\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sv2zr" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.625745 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mztqh\" (UniqueName: \"kubernetes.io/projected/e1523dba-d641-467b-96eb-521434ddfa75-kube-api-access-mztqh\") pod \"catalog-operator-68c6474976-5jkws\" (UID: \"e1523dba-d641-467b-96eb-521434ddfa75\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5jkws" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.650300 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-xf6qs" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.655186 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x5qwp" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.656027 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g2wl6\" (UniqueName: \"kubernetes.io/projected/ca154693-ba02-4d7b-a4b6-45fee35a0280-kube-api-access-g2wl6\") pod \"multus-admission-controller-857f4d67dd-cjtzs\" (UID: \"ca154693-ba02-4d7b-a4b6-45fee35a0280\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-cjtzs" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.665576 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/336a185f-a030-45f8-93f9-be33304506e1-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-qxj2h\" (UID: \"336a185f-a030-45f8-93f9-be33304506e1\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qxj2h" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.667336 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.688738 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-f8fjk\" (UniqueName: \"kubernetes.io/projected/a316cc87-51ce-49fd-b81c-fab7d253dea6-kube-api-access-f8fjk\") pod \"dns-operator-744455d44c-6whzf\" (UID: \"a316cc87-51ce-49fd-b81c-fab7d253dea6\") " pod="openshift-dns-operator/dns-operator-744455d44c-6whzf" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.710484 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxt4l\" (UniqueName: \"kubernetes.io/projected/33ae6fda-0f24-436c-b813-e01bb556110e-kube-api-access-gxt4l\") pod \"packageserver-d55dfcdfc-6j7zl\" (UID: \"33ae6fda-0f24-436c-b813-e01bb556110e\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6j7zl" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.719456 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-4mvfm" event={"ID":"bd18b7ad-276e-4008-85c7-e4680c574d27","Type":"ContainerStarted","Data":"503bf6077a40515bf8b96a8714e1a4505698fd89bd9b080faa8a5c8e7ed2f1a7"} Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.720193 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" event={"ID":"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0","Type":"ContainerStarted","Data":"2ca7cd3113378f042313cf76ccb821b0d34683a044dce77727e2ea5951fb33d1"} Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.746915 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txx8z\" (UniqueName: \"kubernetes.io/projected/a35e6789-77cc-4dd7-acc0-e4ca4ee5cdad-kube-api-access-txx8z\") pod \"downloads-7954f5f757-s4jms\" (UID: \"a35e6789-77cc-4dd7-acc0-e4ca4ee5cdad\") " pod="openshift-console/downloads-7954f5f757-s4jms" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.768541 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-6whzf" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.769202 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cnlm4\" (UniqueName: \"kubernetes.io/projected/68335fcc-2687-4bf0-8a43-f4e3d3c4702e-kube-api-access-cnlm4\") pod \"package-server-manager-789f6589d5-h9sz7\" (UID: \"68335fcc-2687-4bf0-8a43-f4e3d3c4702e\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h9sz7" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.769328 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.777259 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qxj2h" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.780989 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pz6k6"] Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.781043 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-974mb"] Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.789646 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p9b7p\" (UniqueName: \"kubernetes.io/projected/4c1d05e6-fc11-4fb5-bf46-c346ded55ea8-kube-api-access-p9b7p\") pod \"kube-storage-version-migrator-operator-b67b599dd-jkbsc\" (UID: \"4c1d05e6-fc11-4fb5-bf46-c346ded55ea8\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkbsc" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.801976 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sv2zr" Jan 22 14:21:19 crc kubenswrapper[4948]: W0122 14:21:19.804912 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7ba1d16e_01d0_42f5_b3ef_9196a8a11197.slice/crio-fcbb131f7ec26a2a8f5265fcd9173a7cebbc9d955ff88a24efbdf5fd73ce9c49 WatchSource:0}: Error finding container fcbb131f7ec26a2a8f5265fcd9173a7cebbc9d955ff88a24efbdf5fd73ce9c49: Status 404 returned error can't find the container with id fcbb131f7ec26a2a8f5265fcd9173a7cebbc9d955ff88a24efbdf5fd73ce9c49 Jan 22 14:21:19 crc kubenswrapper[4948]: W0122 14:21:19.806336 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod78f96e55_8d97_4d74_8a95_44356bb1c0b1.slice/crio-20230eb4c969a65235fff1d2eee0115b4ced0e39cbfc1a231ba4a1fbb160a4b7 WatchSource:0}: Error finding container 20230eb4c969a65235fff1d2eee0115b4ced0e39cbfc1a231ba4a1fbb160a4b7: Status 404 returned error can't find the container with id 20230eb4c969a65235fff1d2eee0115b4ced0e39cbfc1a231ba4a1fbb160a4b7 Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.806586 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pnzz5\" (UniqueName: \"kubernetes.io/projected/6187e924-4baa-4a90-a669-891278f2e23d-kube-api-access-pnzz5\") pod \"migrator-59844c95c7-ln6zk\" (UID: \"6187e924-4baa-4a90-a669-891278f2e23d\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ln6zk" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.828469 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/5defb8aa-bd1f-43d9-a86d-0026afa4f8d1-console-config\") pod \"console-f9d7485db-7kdkq\" (UID: \"5defb8aa-bd1f-43d9-a86d-0026afa4f8d1\") " pod="openshift-console/console-f9d7485db-7kdkq" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.828501 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.828524 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sw4mq\" (UniqueName: \"kubernetes.io/projected/3b860e63-df81-4b66-a052-835876d6cb58-kube-api-access-sw4mq\") pod \"cluster-image-registry-operator-dc59b4c8b-6bb9s\" (UID: \"3b860e63-df81-4b66-a052-835876d6cb58\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6bb9s" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.828544 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lkz2x\" (UniqueName: \"kubernetes.io/projected/12fa60a3-783c-4015-b759-ab636fd6e559-kube-api-access-lkz2x\") pod \"marketplace-operator-79b997595-7cjtc\" (UID: \"12fa60a3-783c-4015-b759-ab636fd6e559\") " pod="openshift-marketplace/marketplace-operator-79b997595-7cjtc" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.828561 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cfbebb9e-7111-4271-bff9-e663d96e586e-metrics-certs\") pod \"router-default-5444994796-xgx8j\" (UID: \"cfbebb9e-7111-4271-bff9-e663d96e586e\") " pod="openshift-ingress/router-default-5444994796-xgx8j" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.828576 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/85276719-eead-4d75-9df7-219df59cfd40-node-bootstrap-token\") pod \"machine-config-server-28tff\" (UID: \"85276719-eead-4d75-9df7-219df59cfd40\") " pod="openshift-machine-config-operator/machine-config-server-28tff" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.828602 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-installation-pull-secrets\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.828616 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7x6h\" (UniqueName: \"kubernetes.io/projected/cfbebb9e-7111-4271-bff9-e663d96e586e-kube-api-access-l7x6h\") pod \"router-default-5444994796-xgx8j\" (UID: \"cfbebb9e-7111-4271-bff9-e663d96e586e\") " pod="openshift-ingress/router-default-5444994796-xgx8j" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.828702 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/12fa60a3-783c-4015-b759-ab636fd6e559-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-7cjtc\" (UID: \"12fa60a3-783c-4015-b759-ab636fd6e559\") " pod="openshift-marketplace/marketplace-operator-79b997595-7cjtc" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.828719 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/5defb8aa-bd1f-43d9-a86d-0026afa4f8d1-service-ca\") pod \"console-f9d7485db-7kdkq\" (UID: \"5defb8aa-bd1f-43d9-a86d-0026afa4f8d1\") " 
pod="openshift-console/console-f9d7485db-7kdkq" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.828736 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-registry-certificates\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.828767 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c9115ab-73ac-4742-96e7-041345c9c222-serving-cert\") pod \"authentication-operator-69f744f599-jwzjg\" (UID: \"8c9115ab-73ac-4742-96e7-041345c9c222\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jwzjg" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.828793 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r4772\" (UniqueName: \"kubernetes.io/projected/90f993a5-e187-4e11-9295-fba46ad3f759-kube-api-access-r4772\") pod \"ingress-canary-t6lsv\" (UID: \"90f993a5-e187-4e11-9295-fba46ad3f759\") " pod="openshift-ingress-canary/ingress-canary-t6lsv" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.828808 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ad723228-be0e-414d-a738-9ae3a916bef7-trusted-ca\") pod \"ingress-operator-5b745b69d9-lt5mv\" (UID: \"ad723228-be0e-414d-a738-9ae3a916bef7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lt5mv" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.828834 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fec867e7-c935-489e-b5e9-70d0cf71413b-serving-cert\") pod \"route-controller-manager-6576b87f9c-6p6lm\" (UID: \"fec867e7-c935-489e-b5e9-70d0cf71413b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.828849 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/85276719-eead-4d75-9df7-219df59cfd40-certs\") pod \"machine-config-server-28tff\" (UID: \"85276719-eead-4d75-9df7-219df59cfd40\") " pod="openshift-machine-config-operator/machine-config-server-28tff" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.828872 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/627c131b-4a05-4674-8889-fba91fd90ec7-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-f4955\" (UID: \"627c131b-4a05-4674-8889-fba91fd90ec7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f4955" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.828886 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qhh5h\" (UniqueName: \"kubernetes.io/projected/ad723228-be0e-414d-a738-9ae3a916bef7-kube-api-access-qhh5h\") pod \"ingress-operator-5b745b69d9-lt5mv\" (UID: \"ad723228-be0e-414d-a738-9ae3a916bef7\") " 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lt5mv" Jan 22 14:21:19 crc kubenswrapper[4948]: E0122 14:21:19.828910 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:20.328897062 +0000 UTC m=+138.658569991 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.830111 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/3b860e63-df81-4b66-a052-835876d6cb58-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-6bb9s\" (UID: \"3b860e63-df81-4b66-a052-835876d6cb58\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6bb9s" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.830451 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cfbebb9e-7111-4271-bff9-e663d96e586e-service-ca-bundle\") pod \"router-default-5444994796-xgx8j\" (UID: \"cfbebb9e-7111-4271-bff9-e663d96e586e\") " pod="openshift-ingress/router-default-5444994796-xgx8j" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.830528 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-registry-tls\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.830562 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c9115ab-73ac-4742-96e7-041345c9c222-config\") pod \"authentication-operator-69f744f599-jwzjg\" (UID: \"8c9115ab-73ac-4742-96e7-041345c9c222\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jwzjg" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.830601 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5llx\" (UniqueName: \"kubernetes.io/projected/fec867e7-c935-489e-b5e9-70d0cf71413b-kube-api-access-k5llx\") pod \"route-controller-manager-6576b87f9c-6p6lm\" (UID: \"fec867e7-c935-489e-b5e9-70d0cf71413b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.830617 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ad723228-be0e-414d-a738-9ae3a916bef7-metrics-tls\") pod \"ingress-operator-5b745b69d9-lt5mv\" (UID: \"ad723228-be0e-414d-a738-9ae3a916bef7\") " 
pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lt5mv" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.831154 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/90f993a5-e187-4e11-9295-fba46ad3f759-cert\") pod \"ingress-canary-t6lsv\" (UID: \"90f993a5-e187-4e11-9295-fba46ad3f759\") " pod="openshift-ingress-canary/ingress-canary-t6lsv" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.831231 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-ca-trust-extracted\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.831253 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/627c131b-4a05-4674-8889-fba91fd90ec7-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-f4955\" (UID: \"627c131b-4a05-4674-8889-fba91fd90ec7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f4955" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.831288 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdbdb\" (UniqueName: \"kubernetes.io/projected/859c9f3a-e3af-45a7-bc7c-ea5838c24a4b-kube-api-access-bdbdb\") pod \"service-ca-9c57cc56f-qjjhb\" (UID: \"859c9f3a-e3af-45a7-bc7c-ea5838c24a4b\") " pod="openshift-service-ca/service-ca-9c57cc56f-qjjhb" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.831314 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a187fdb-fd89-404c-b194-00ef10ec1a01-serving-cert\") pod \"service-ca-operator-777779d784-g5zjf\" (UID: \"8a187fdb-fd89-404c-b194-00ef10ec1a01\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-g5zjf" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.831409 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/5defb8aa-bd1f-43d9-a86d-0026afa4f8d1-console-serving-cert\") pod \"console-f9d7485db-7kdkq\" (UID: \"5defb8aa-bd1f-43d9-a86d-0026afa4f8d1\") " pod="openshift-console/console-f9d7485db-7kdkq" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.831435 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-trusted-ca\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.831454 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/627c131b-4a05-4674-8889-fba91fd90ec7-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-f4955\" (UID: \"627c131b-4a05-4674-8889-fba91fd90ec7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f4955" Jan 22 14:21:19 
crc kubenswrapper[4948]: I0122 14:21:19.831512 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8c9115ab-73ac-4742-96e7-041345c9c222-service-ca-bundle\") pod \"authentication-operator-69f744f599-jwzjg\" (UID: \"8c9115ab-73ac-4742-96e7-041345c9c222\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jwzjg" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.831561 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fec867e7-c935-489e-b5e9-70d0cf71413b-config\") pod \"route-controller-manager-6576b87f9c-6p6lm\" (UID: \"fec867e7-c935-489e-b5e9-70d0cf71413b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.831576 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/5defb8aa-bd1f-43d9-a86d-0026afa4f8d1-oauth-serving-cert\") pod \"console-f9d7485db-7kdkq\" (UID: \"5defb8aa-bd1f-43d9-a86d-0026afa4f8d1\") " pod="openshift-console/console-f9d7485db-7kdkq" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.831593 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/cfbebb9e-7111-4271-bff9-e663d96e586e-stats-auth\") pod \"router-default-5444994796-xgx8j\" (UID: \"cfbebb9e-7111-4271-bff9-e663d96e586e\") " pod="openshift-ingress/router-default-5444994796-xgx8j" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.831611 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5defb8aa-bd1f-43d9-a86d-0026afa4f8d1-trusted-ca-bundle\") pod \"console-f9d7485db-7kdkq\" (UID: \"5defb8aa-bd1f-43d9-a86d-0026afa4f8d1\") " pod="openshift-console/console-f9d7485db-7kdkq" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.831650 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3b860e63-df81-4b66-a052-835876d6cb58-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-6bb9s\" (UID: \"3b860e63-df81-4b66-a052-835876d6cb58\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6bb9s" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.831672 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2d82g\" (UniqueName: \"kubernetes.io/projected/85276719-eead-4d75-9df7-219df59cfd40-kube-api-access-2d82g\") pod \"machine-config-server-28tff\" (UID: \"85276719-eead-4d75-9df7-219df59cfd40\") " pod="openshift-machine-config-operator/machine-config-server-28tff" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.831716 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a187fdb-fd89-404c-b194-00ef10ec1a01-config\") pod \"service-ca-operator-777779d784-g5zjf\" (UID: \"8a187fdb-fd89-404c-b194-00ef10ec1a01\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-g5zjf" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.831740 4948 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/5defb8aa-bd1f-43d9-a86d-0026afa4f8d1-console-oauth-config\") pod \"console-f9d7485db-7kdkq\" (UID: \"5defb8aa-bd1f-43d9-a86d-0026afa4f8d1\") " pod="openshift-console/console-f9d7485db-7kdkq" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.831767 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ad723228-be0e-414d-a738-9ae3a916bef7-bound-sa-token\") pod \"ingress-operator-5b745b69d9-lt5mv\" (UID: \"ad723228-be0e-414d-a738-9ae3a916bef7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lt5mv" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.831830 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/12fa60a3-783c-4015-b759-ab636fd6e559-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-7cjtc\" (UID: \"12fa60a3-783c-4015-b759-ab636fd6e559\") " pod="openshift-marketplace/marketplace-operator-79b997595-7cjtc" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.831850 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fec867e7-c935-489e-b5e9-70d0cf71413b-client-ca\") pod \"route-controller-manager-6576b87f9c-6p6lm\" (UID: \"fec867e7-c935-489e-b5e9-70d0cf71413b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.831893 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ncpfm\" (UniqueName: \"kubernetes.io/projected/8a187fdb-fd89-404c-b194-00ef10ec1a01-kube-api-access-ncpfm\") pod \"service-ca-operator-777779d784-g5zjf\" (UID: \"8a187fdb-fd89-404c-b194-00ef10ec1a01\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-g5zjf" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.831919 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/cfbebb9e-7111-4271-bff9-e663d96e586e-default-certificate\") pod \"router-default-5444994796-xgx8j\" (UID: \"cfbebb9e-7111-4271-bff9-e663d96e586e\") " pod="openshift-ingress/router-default-5444994796-xgx8j" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.831982 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-bound-sa-token\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.832006 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bw4sw\" (UniqueName: \"kubernetes.io/projected/5defb8aa-bd1f-43d9-a86d-0026afa4f8d1-kube-api-access-bw4sw\") pod \"console-f9d7485db-7kdkq\" (UID: \"5defb8aa-bd1f-43d9-a86d-0026afa4f8d1\") " pod="openshift-console/console-f9d7485db-7kdkq" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.832058 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"kube-api-access-qbnp8\" (UniqueName: \"kubernetes.io/projected/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-kube-api-access-qbnp8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.832081 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/859c9f3a-e3af-45a7-bc7c-ea5838c24a4b-signing-cabundle\") pod \"service-ca-9c57cc56f-qjjhb\" (UID: \"859c9f3a-e3af-45a7-bc7c-ea5838c24a4b\") " pod="openshift-service-ca/service-ca-9c57cc56f-qjjhb" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.832124 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8c9115ab-73ac-4742-96e7-041345c9c222-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-jwzjg\" (UID: \"8c9115ab-73ac-4742-96e7-041345c9c222\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jwzjg" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.832149 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cm2pg\" (UniqueName: \"kubernetes.io/projected/8c9115ab-73ac-4742-96e7-041345c9c222-kube-api-access-cm2pg\") pod \"authentication-operator-69f744f599-jwzjg\" (UID: \"8c9115ab-73ac-4742-96e7-041345c9c222\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jwzjg" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.832164 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/859c9f3a-e3af-45a7-bc7c-ea5838c24a4b-signing-key\") pod \"service-ca-9c57cc56f-qjjhb\" (UID: \"859c9f3a-e3af-45a7-bc7c-ea5838c24a4b\") " pod="openshift-service-ca/service-ca-9c57cc56f-qjjhb" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.832188 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3b860e63-df81-4b66-a052-835876d6cb58-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-6bb9s\" (UID: \"3b860e63-df81-4b66-a052-835876d6cb58\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6bb9s" Jan 22 14:21:19 crc kubenswrapper[4948]: W0122 14:21:19.851238 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6f9164f3_da46_447e_94fd_ffda5fb2f565.slice/crio-eeb58a51ed52ea44985f1c5962b158643876c77e0decc59f6683a14c5a11f43d WatchSource:0}: Error finding container eeb58a51ed52ea44985f1c5962b158643876c77e0decc59f6683a14c5a11f43d: Status 404 returned error can't find the container with id eeb58a51ed52ea44985f1c5962b158643876c77e0decc59f6683a14c5a11f43d Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.865862 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fkltl" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.870606 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-cjtzs" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.875422 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-ps89b"] Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.889735 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5jkws" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.889926 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6j7zl" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.898454 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-bbmbn"] Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.902005 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-qxjql"] Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.905123 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zfs47"] Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.927484 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9f5vd"] Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.933677 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.933928 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/3b860e63-df81-4b66-a052-835876d6cb58-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-6bb9s\" (UID: \"3b860e63-df81-4b66-a052-835876d6cb58\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6bb9s" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.933964 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pbmdc\" (UniqueName: \"kubernetes.io/projected/e31f1c5f-27f0-4742-a04b-91544011bdad-kube-api-access-pbmdc\") pod \"csi-hostpathplugin-8q98t\" (UID: \"e31f1c5f-27f0-4742-a04b-91544011bdad\") " pod="hostpath-provisioner/csi-hostpathplugin-8q98t" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.933989 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-registry-tls\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934005 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c9115ab-73ac-4742-96e7-041345c9c222-config\") pod \"authentication-operator-69f744f599-jwzjg\" (UID: \"8c9115ab-73ac-4742-96e7-041345c9c222\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-jwzjg" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934041 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cfbebb9e-7111-4271-bff9-e663d96e586e-service-ca-bundle\") pod \"router-default-5444994796-xgx8j\" (UID: \"cfbebb9e-7111-4271-bff9-e663d96e586e\") " pod="openshift-ingress/router-default-5444994796-xgx8j" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934060 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/e31f1c5f-27f0-4742-a04b-91544011bdad-registration-dir\") pod \"csi-hostpathplugin-8q98t\" (UID: \"e31f1c5f-27f0-4742-a04b-91544011bdad\") " pod="hostpath-provisioner/csi-hostpathplugin-8q98t" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934078 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5llx\" (UniqueName: \"kubernetes.io/projected/fec867e7-c935-489e-b5e9-70d0cf71413b-kube-api-access-k5llx\") pod \"route-controller-manager-6576b87f9c-6p6lm\" (UID: \"fec867e7-c935-489e-b5e9-70d0cf71413b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934095 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ad723228-be0e-414d-a738-9ae3a916bef7-metrics-tls\") pod \"ingress-operator-5b745b69d9-lt5mv\" (UID: \"ad723228-be0e-414d-a738-9ae3a916bef7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lt5mv" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934131 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0a9e9dc5-0fe4-493d-9d50-c92c3e7a983c-metrics-tls\") pod \"dns-default-9svkn\" (UID: \"0a9e9dc5-0fe4-493d-9d50-c92c3e7a983c\") " pod="openshift-dns/dns-default-9svkn" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934160 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/90f993a5-e187-4e11-9295-fba46ad3f759-cert\") pod \"ingress-canary-t6lsv\" (UID: \"90f993a5-e187-4e11-9295-fba46ad3f759\") " pod="openshift-ingress-canary/ingress-canary-t6lsv" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934192 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-ca-trust-extracted\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934210 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/627c131b-4a05-4674-8889-fba91fd90ec7-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-f4955\" (UID: \"627c131b-4a05-4674-8889-fba91fd90ec7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f4955" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934228 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdbdb\" 
(UniqueName: \"kubernetes.io/projected/859c9f3a-e3af-45a7-bc7c-ea5838c24a4b-kube-api-access-bdbdb\") pod \"service-ca-9c57cc56f-qjjhb\" (UID: \"859c9f3a-e3af-45a7-bc7c-ea5838c24a4b\") " pod="openshift-service-ca/service-ca-9c57cc56f-qjjhb" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934245 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/e31f1c5f-27f0-4742-a04b-91544011bdad-csi-data-dir\") pod \"csi-hostpathplugin-8q98t\" (UID: \"e31f1c5f-27f0-4742-a04b-91544011bdad\") " pod="hostpath-provisioner/csi-hostpathplugin-8q98t" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934287 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a187fdb-fd89-404c-b194-00ef10ec1a01-serving-cert\") pod \"service-ca-operator-777779d784-g5zjf\" (UID: \"8a187fdb-fd89-404c-b194-00ef10ec1a01\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-g5zjf" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934301 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/e31f1c5f-27f0-4742-a04b-91544011bdad-plugins-dir\") pod \"csi-hostpathplugin-8q98t\" (UID: \"e31f1c5f-27f0-4742-a04b-91544011bdad\") " pod="hostpath-provisioner/csi-hostpathplugin-8q98t" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934330 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/5defb8aa-bd1f-43d9-a86d-0026afa4f8d1-console-serving-cert\") pod \"console-f9d7485db-7kdkq\" (UID: \"5defb8aa-bd1f-43d9-a86d-0026afa4f8d1\") " pod="openshift-console/console-f9d7485db-7kdkq" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934346 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-trusted-ca\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934364 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/627c131b-4a05-4674-8889-fba91fd90ec7-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-f4955\" (UID: \"627c131b-4a05-4674-8889-fba91fd90ec7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f4955" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934382 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0a9e9dc5-0fe4-493d-9d50-c92c3e7a983c-config-volume\") pod \"dns-default-9svkn\" (UID: \"0a9e9dc5-0fe4-493d-9d50-c92c3e7a983c\") " pod="openshift-dns/dns-default-9svkn" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934412 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8c9115ab-73ac-4742-96e7-041345c9c222-service-ca-bundle\") pod \"authentication-operator-69f744f599-jwzjg\" (UID: \"8c9115ab-73ac-4742-96e7-041345c9c222\") " 
pod="openshift-authentication-operator/authentication-operator-69f744f599-jwzjg" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934428 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgcpq\" (UniqueName: \"kubernetes.io/projected/0a9e9dc5-0fe4-493d-9d50-c92c3e7a983c-kube-api-access-xgcpq\") pod \"dns-default-9svkn\" (UID: \"0a9e9dc5-0fe4-493d-9d50-c92c3e7a983c\") " pod="openshift-dns/dns-default-9svkn" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934520 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fec867e7-c935-489e-b5e9-70d0cf71413b-config\") pod \"route-controller-manager-6576b87f9c-6p6lm\" (UID: \"fec867e7-c935-489e-b5e9-70d0cf71413b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934538 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/5defb8aa-bd1f-43d9-a86d-0026afa4f8d1-oauth-serving-cert\") pod \"console-f9d7485db-7kdkq\" (UID: \"5defb8aa-bd1f-43d9-a86d-0026afa4f8d1\") " pod="openshift-console/console-f9d7485db-7kdkq" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934554 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5defb8aa-bd1f-43d9-a86d-0026afa4f8d1-trusted-ca-bundle\") pod \"console-f9d7485db-7kdkq\" (UID: \"5defb8aa-bd1f-43d9-a86d-0026afa4f8d1\") " pod="openshift-console/console-f9d7485db-7kdkq" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934579 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/cfbebb9e-7111-4271-bff9-e663d96e586e-stats-auth\") pod \"router-default-5444994796-xgx8j\" (UID: \"cfbebb9e-7111-4271-bff9-e663d96e586e\") " pod="openshift-ingress/router-default-5444994796-xgx8j" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934613 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3b860e63-df81-4b66-a052-835876d6cb58-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-6bb9s\" (UID: \"3b860e63-df81-4b66-a052-835876d6cb58\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6bb9s" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934631 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2d82g\" (UniqueName: \"kubernetes.io/projected/85276719-eead-4d75-9df7-219df59cfd40-kube-api-access-2d82g\") pod \"machine-config-server-28tff\" (UID: \"85276719-eead-4d75-9df7-219df59cfd40\") " pod="openshift-machine-config-operator/machine-config-server-28tff" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934647 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a187fdb-fd89-404c-b194-00ef10ec1a01-config\") pod \"service-ca-operator-777779d784-g5zjf\" (UID: \"8a187fdb-fd89-404c-b194-00ef10ec1a01\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-g5zjf" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934663 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: 
\"kubernetes.io/secret/5defb8aa-bd1f-43d9-a86d-0026afa4f8d1-console-oauth-config\") pod \"console-f9d7485db-7kdkq\" (UID: \"5defb8aa-bd1f-43d9-a86d-0026afa4f8d1\") " pod="openshift-console/console-f9d7485db-7kdkq" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934694 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ad723228-be0e-414d-a738-9ae3a916bef7-bound-sa-token\") pod \"ingress-operator-5b745b69d9-lt5mv\" (UID: \"ad723228-be0e-414d-a738-9ae3a916bef7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lt5mv" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934726 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/12fa60a3-783c-4015-b759-ab636fd6e559-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-7cjtc\" (UID: \"12fa60a3-783c-4015-b759-ab636fd6e559\") " pod="openshift-marketplace/marketplace-operator-79b997595-7cjtc" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934749 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/e31f1c5f-27f0-4742-a04b-91544011bdad-mountpoint-dir\") pod \"csi-hostpathplugin-8q98t\" (UID: \"e31f1c5f-27f0-4742-a04b-91544011bdad\") " pod="hostpath-provisioner/csi-hostpathplugin-8q98t" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934779 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fec867e7-c935-489e-b5e9-70d0cf71413b-client-ca\") pod \"route-controller-manager-6576b87f9c-6p6lm\" (UID: \"fec867e7-c935-489e-b5e9-70d0cf71413b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934796 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ncpfm\" (UniqueName: \"kubernetes.io/projected/8a187fdb-fd89-404c-b194-00ef10ec1a01-kube-api-access-ncpfm\") pod \"service-ca-operator-777779d784-g5zjf\" (UID: \"8a187fdb-fd89-404c-b194-00ef10ec1a01\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-g5zjf" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934813 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/cfbebb9e-7111-4271-bff9-e663d96e586e-default-certificate\") pod \"router-default-5444994796-xgx8j\" (UID: \"cfbebb9e-7111-4271-bff9-e663d96e586e\") " pod="openshift-ingress/router-default-5444994796-xgx8j" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934833 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-bound-sa-token\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934874 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bw4sw\" (UniqueName: \"kubernetes.io/projected/5defb8aa-bd1f-43d9-a86d-0026afa4f8d1-kube-api-access-bw4sw\") pod \"console-f9d7485db-7kdkq\" (UID: \"5defb8aa-bd1f-43d9-a86d-0026afa4f8d1\") " 
pod="openshift-console/console-f9d7485db-7kdkq" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934899 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbnp8\" (UniqueName: \"kubernetes.io/projected/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-kube-api-access-qbnp8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934915 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/859c9f3a-e3af-45a7-bc7c-ea5838c24a4b-signing-cabundle\") pod \"service-ca-9c57cc56f-qjjhb\" (UID: \"859c9f3a-e3af-45a7-bc7c-ea5838c24a4b\") " pod="openshift-service-ca/service-ca-9c57cc56f-qjjhb" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934930 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cm2pg\" (UniqueName: \"kubernetes.io/projected/8c9115ab-73ac-4742-96e7-041345c9c222-kube-api-access-cm2pg\") pod \"authentication-operator-69f744f599-jwzjg\" (UID: \"8c9115ab-73ac-4742-96e7-041345c9c222\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jwzjg" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934947 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/859c9f3a-e3af-45a7-bc7c-ea5838c24a4b-signing-key\") pod \"service-ca-9c57cc56f-qjjhb\" (UID: \"859c9f3a-e3af-45a7-bc7c-ea5838c24a4b\") " pod="openshift-service-ca/service-ca-9c57cc56f-qjjhb" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.934964 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8c9115ab-73ac-4742-96e7-041345c9c222-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-jwzjg\" (UID: \"8c9115ab-73ac-4742-96e7-041345c9c222\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jwzjg" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.935442 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zx2x2"] Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.936316 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/12fa60a3-783c-4015-b759-ab636fd6e559-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-7cjtc\" (UID: \"12fa60a3-783c-4015-b759-ab636fd6e559\") " pod="openshift-marketplace/marketplace-operator-79b997595-7cjtc" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.936693 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8c9115ab-73ac-4742-96e7-041345c9c222-config\") pod \"authentication-operator-69f744f599-jwzjg\" (UID: \"8c9115ab-73ac-4742-96e7-041345c9c222\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jwzjg" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.936901 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fec867e7-c935-489e-b5e9-70d0cf71413b-config\") pod \"route-controller-manager-6576b87f9c-6p6lm\" (UID: \"fec867e7-c935-489e-b5e9-70d0cf71413b\") " 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.936986 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8a187fdb-fd89-404c-b194-00ef10ec1a01-config\") pod \"service-ca-operator-777779d784-g5zjf\" (UID: \"8a187fdb-fd89-404c-b194-00ef10ec1a01\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-g5zjf" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.937470 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cfbebb9e-7111-4271-bff9-e663d96e586e-service-ca-bundle\") pod \"router-default-5444994796-xgx8j\" (UID: \"cfbebb9e-7111-4271-bff9-e663d96e586e\") " pod="openshift-ingress/router-default-5444994796-xgx8j" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.937940 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/5defb8aa-bd1f-43d9-a86d-0026afa4f8d1-oauth-serving-cert\") pod \"console-f9d7485db-7kdkq\" (UID: \"5defb8aa-bd1f-43d9-a86d-0026afa4f8d1\") " pod="openshift-console/console-f9d7485db-7kdkq" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.940057 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-registry-tls\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.940519 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fec867e7-c935-489e-b5e9-70d0cf71413b-client-ca\") pod \"route-controller-manager-6576b87f9c-6p6lm\" (UID: \"fec867e7-c935-489e-b5e9-70d0cf71413b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.941207 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8c9115ab-73ac-4742-96e7-041345c9c222-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-jwzjg\" (UID: \"8c9115ab-73ac-4742-96e7-041345c9c222\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jwzjg" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.942574 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-ca-trust-extracted\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.942679 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/5defb8aa-bd1f-43d9-a86d-0026afa4f8d1-console-oauth-config\") pod \"console-f9d7485db-7kdkq\" (UID: \"5defb8aa-bd1f-43d9-a86d-0026afa4f8d1\") " pod="openshift-console/console-f9d7485db-7kdkq" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.943049 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/3b860e63-df81-4b66-a052-835876d6cb58-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-6bb9s\" (UID: \"3b860e63-df81-4b66-a052-835876d6cb58\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6bb9s" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.943256 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/5defb8aa-bd1f-43d9-a86d-0026afa4f8d1-trusted-ca-bundle\") pod \"console-f9d7485db-7kdkq\" (UID: \"5defb8aa-bd1f-43d9-a86d-0026afa4f8d1\") " pod="openshift-console/console-f9d7485db-7kdkq" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.943557 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/cfbebb9e-7111-4271-bff9-e663d96e586e-stats-auth\") pod \"router-default-5444994796-xgx8j\" (UID: \"cfbebb9e-7111-4271-bff9-e663d96e586e\") " pod="openshift-ingress/router-default-5444994796-xgx8j" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.943831 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/5defb8aa-bd1f-43d9-a86d-0026afa4f8d1-console-config\") pod \"console-f9d7485db-7kdkq\" (UID: \"5defb8aa-bd1f-43d9-a86d-0026afa4f8d1\") " pod="openshift-console/console-f9d7485db-7kdkq" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.944093 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8c9115ab-73ac-4742-96e7-041345c9c222-service-ca-bundle\") pod \"authentication-operator-69f744f599-jwzjg\" (UID: \"8c9115ab-73ac-4742-96e7-041345c9c222\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jwzjg" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.944195 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sw4mq\" (UniqueName: \"kubernetes.io/projected/3b860e63-df81-4b66-a052-835876d6cb58-kube-api-access-sw4mq\") pod \"cluster-image-registry-operator-dc59b4c8b-6bb9s\" (UID: \"3b860e63-df81-4b66-a052-835876d6cb58\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6bb9s" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.944248 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/e31f1c5f-27f0-4742-a04b-91544011bdad-socket-dir\") pod \"csi-hostpathplugin-8q98t\" (UID: \"e31f1c5f-27f0-4742-a04b-91544011bdad\") " pod="hostpath-provisioner/csi-hostpathplugin-8q98t" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.944334 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lkz2x\" (UniqueName: \"kubernetes.io/projected/12fa60a3-783c-4015-b759-ab636fd6e559-kube-api-access-lkz2x\") pod \"marketplace-operator-79b997595-7cjtc\" (UID: \"12fa60a3-783c-4015-b759-ab636fd6e559\") " pod="openshift-marketplace/marketplace-operator-79b997595-7cjtc" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.944364 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/85276719-eead-4d75-9df7-219df59cfd40-node-bootstrap-token\") pod \"machine-config-server-28tff\" (UID: \"85276719-eead-4d75-9df7-219df59cfd40\") " 
pod="openshift-machine-config-operator/machine-config-server-28tff" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.944410 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cfbebb9e-7111-4271-bff9-e663d96e586e-metrics-certs\") pod \"router-default-5444994796-xgx8j\" (UID: \"cfbebb9e-7111-4271-bff9-e663d96e586e\") " pod="openshift-ingress/router-default-5444994796-xgx8j" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.944643 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/90f993a5-e187-4e11-9295-fba46ad3f759-cert\") pod \"ingress-canary-t6lsv\" (UID: \"90f993a5-e187-4e11-9295-fba46ad3f759\") " pod="openshift-ingress-canary/ingress-canary-t6lsv" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.944864 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/859c9f3a-e3af-45a7-bc7c-ea5838c24a4b-signing-cabundle\") pod \"service-ca-9c57cc56f-qjjhb\" (UID: \"859c9f3a-e3af-45a7-bc7c-ea5838c24a4b\") " pod="openshift-service-ca/service-ca-9c57cc56f-qjjhb" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.944959 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/627c131b-4a05-4674-8889-fba91fd90ec7-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-f4955\" (UID: \"627c131b-4a05-4674-8889-fba91fd90ec7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f4955" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.945213 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/5defb8aa-bd1f-43d9-a86d-0026afa4f8d1-console-serving-cert\") pod \"console-f9d7485db-7kdkq\" (UID: \"5defb8aa-bd1f-43d9-a86d-0026afa4f8d1\") " pod="openshift-console/console-f9d7485db-7kdkq" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.946842 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3b860e63-df81-4b66-a052-835876d6cb58-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-6bb9s\" (UID: \"3b860e63-df81-4b66-a052-835876d6cb58\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6bb9s" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.947348 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-installation-pull-secrets\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.947372 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/5defb8aa-bd1f-43d9-a86d-0026afa4f8d1-console-config\") pod \"console-f9d7485db-7kdkq\" (UID: \"5defb8aa-bd1f-43d9-a86d-0026afa4f8d1\") " pod="openshift-console/console-f9d7485db-7kdkq" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.947376 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7x6h\" (UniqueName: \"kubernetes.io/projected/cfbebb9e-7111-4271-bff9-e663d96e586e-kube-api-access-l7x6h\") pod 
\"router-default-5444994796-xgx8j\" (UID: \"cfbebb9e-7111-4271-bff9-e663d96e586e\") " pod="openshift-ingress/router-default-5444994796-xgx8j" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.947580 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/12fa60a3-783c-4015-b759-ab636fd6e559-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-7cjtc\" (UID: \"12fa60a3-783c-4015-b759-ab636fd6e559\") " pod="openshift-marketplace/marketplace-operator-79b997595-7cjtc" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.947608 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/5defb8aa-bd1f-43d9-a86d-0026afa4f8d1-service-ca\") pod \"console-f9d7485db-7kdkq\" (UID: \"5defb8aa-bd1f-43d9-a86d-0026afa4f8d1\") " pod="openshift-console/console-f9d7485db-7kdkq" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.947646 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/859c9f3a-e3af-45a7-bc7c-ea5838c24a4b-signing-key\") pod \"service-ca-9c57cc56f-qjjhb\" (UID: \"859c9f3a-e3af-45a7-bc7c-ea5838c24a4b\") " pod="openshift-service-ca/service-ca-9c57cc56f-qjjhb" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.947885 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-registry-certificates\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.948108 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/cfbebb9e-7111-4271-bff9-e663d96e586e-metrics-certs\") pod \"router-default-5444994796-xgx8j\" (UID: \"cfbebb9e-7111-4271-bff9-e663d96e586e\") " pod="openshift-ingress/router-default-5444994796-xgx8j" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.948170 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c9115ab-73ac-4742-96e7-041345c9c222-serving-cert\") pod \"authentication-operator-69f744f599-jwzjg\" (UID: \"8c9115ab-73ac-4742-96e7-041345c9c222\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jwzjg" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.948234 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r4772\" (UniqueName: \"kubernetes.io/projected/90f993a5-e187-4e11-9295-fba46ad3f759-kube-api-access-r4772\") pod \"ingress-canary-t6lsv\" (UID: \"90f993a5-e187-4e11-9295-fba46ad3f759\") " pod="openshift-ingress-canary/ingress-canary-t6lsv" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.948255 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ad723228-be0e-414d-a738-9ae3a916bef7-trusted-ca\") pod \"ingress-operator-5b745b69d9-lt5mv\" (UID: \"ad723228-be0e-414d-a738-9ae3a916bef7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lt5mv" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.949116 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"default-certificate\" (UniqueName: \"kubernetes.io/secret/cfbebb9e-7111-4271-bff9-e663d96e586e-default-certificate\") pod \"router-default-5444994796-xgx8j\" (UID: \"cfbebb9e-7111-4271-bff9-e663d96e586e\") " pod="openshift-ingress/router-default-5444994796-xgx8j" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.949625 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/3b860e63-df81-4b66-a052-835876d6cb58-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-6bb9s\" (UID: \"3b860e63-df81-4b66-a052-835876d6cb58\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6bb9s" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.949856 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fec867e7-c935-489e-b5e9-70d0cf71413b-serving-cert\") pod \"route-controller-manager-6576b87f9c-6p6lm\" (UID: \"fec867e7-c935-489e-b5e9-70d0cf71413b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.949903 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/85276719-eead-4d75-9df7-219df59cfd40-certs\") pod \"machine-config-server-28tff\" (UID: \"85276719-eead-4d75-9df7-219df59cfd40\") " pod="openshift-machine-config-operator/machine-config-server-28tff" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.949933 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/627c131b-4a05-4674-8889-fba91fd90ec7-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-f4955\" (UID: \"627c131b-4a05-4674-8889-fba91fd90ec7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f4955" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.949987 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qhh5h\" (UniqueName: \"kubernetes.io/projected/ad723228-be0e-414d-a738-9ae3a916bef7-kube-api-access-qhh5h\") pod \"ingress-operator-5b745b69d9-lt5mv\" (UID: \"ad723228-be0e-414d-a738-9ae3a916bef7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lt5mv" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.950463 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/ad723228-be0e-414d-a738-9ae3a916bef7-trusted-ca\") pod \"ingress-operator-5b745b69d9-lt5mv\" (UID: \"ad723228-be0e-414d-a738-9ae3a916bef7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lt5mv" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.950539 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/627c131b-4a05-4674-8889-fba91fd90ec7-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-f4955\" (UID: \"627c131b-4a05-4674-8889-fba91fd90ec7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f4955" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.950602 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/85276719-eead-4d75-9df7-219df59cfd40-node-bootstrap-token\") pod \"machine-config-server-28tff\" (UID: 
\"85276719-eead-4d75-9df7-219df59cfd40\") " pod="openshift-machine-config-operator/machine-config-server-28tff" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.953474 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/85276719-eead-4d75-9df7-219df59cfd40-certs\") pod \"machine-config-server-28tff\" (UID: \"85276719-eead-4d75-9df7-219df59cfd40\") " pod="openshift-machine-config-operator/machine-config-server-28tff" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.955741 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/12fa60a3-783c-4015-b759-ab636fd6e559-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-7cjtc\" (UID: \"12fa60a3-783c-4015-b759-ab636fd6e559\") " pod="openshift-marketplace/marketplace-operator-79b997595-7cjtc" Jan 22 14:21:19 crc kubenswrapper[4948]: E0122 14:21:19.956088 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:20.456067269 +0000 UTC m=+138.785740198 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.957710 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-registry-certificates\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.960690 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/ad723228-be0e-414d-a738-9ae3a916bef7-metrics-tls\") pod \"ingress-operator-5b745b69d9-lt5mv\" (UID: \"ad723228-be0e-414d-a738-9ae3a916bef7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lt5mv" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.962046 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-trusted-ca\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.962616 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-installation-pull-secrets\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.962900 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/8a187fdb-fd89-404c-b194-00ef10ec1a01-serving-cert\") pod \"service-ca-operator-777779d784-g5zjf\" (UID: \"8a187fdb-fd89-404c-b194-00ef10ec1a01\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-g5zjf" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.972167 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-s4jms" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.991348 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xf6qs"] Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.992356 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkbsc" Jan 22 14:21:19 crc kubenswrapper[4948]: I0122 14:21:19.997540 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/3b860e63-df81-4b66-a052-835876d6cb58-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-6bb9s\" (UID: \"3b860e63-df81-4b66-a052-835876d6cb58\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6bb9s" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.010463 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2d82g\" (UniqueName: \"kubernetes.io/projected/85276719-eead-4d75-9df7-219df59cfd40-kube-api-access-2d82g\") pod \"machine-config-server-28tff\" (UID: \"85276719-eead-4d75-9df7-219df59cfd40\") " pod="openshift-machine-config-operator/machine-config-server-28tff" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.010527 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h9sz7" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.032419 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5llx\" (UniqueName: \"kubernetes.io/projected/fec867e7-c935-489e-b5e9-70d0cf71413b-kube-api-access-k5llx\") pod \"route-controller-manager-6576b87f9c-6p6lm\" (UID: \"fec867e7-c935-489e-b5e9-70d0cf71413b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.047882 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-l9pc7"] Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.047951 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29484855-52bdf"] Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.053064 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cm2pg\" (UniqueName: \"kubernetes.io/projected/8c9115ab-73ac-4742-96e7-041345c9c222-kube-api-access-cm2pg\") pod \"authentication-operator-69f744f599-jwzjg\" (UID: \"8c9115ab-73ac-4742-96e7-041345c9c222\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jwzjg" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.053396 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ln6zk" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.055786 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.055822 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/e31f1c5f-27f0-4742-a04b-91544011bdad-socket-dir\") pod \"csi-hostpathplugin-8q98t\" (UID: \"e31f1c5f-27f0-4742-a04b-91544011bdad\") " pod="hostpath-provisioner/csi-hostpathplugin-8q98t" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.055911 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pbmdc\" (UniqueName: \"kubernetes.io/projected/e31f1c5f-27f0-4742-a04b-91544011bdad-kube-api-access-pbmdc\") pod \"csi-hostpathplugin-8q98t\" (UID: \"e31f1c5f-27f0-4742-a04b-91544011bdad\") " pod="hostpath-provisioner/csi-hostpathplugin-8q98t" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.055932 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/e31f1c5f-27f0-4742-a04b-91544011bdad-registration-dir\") pod \"csi-hostpathplugin-8q98t\" (UID: \"e31f1c5f-27f0-4742-a04b-91544011bdad\") " pod="hostpath-provisioner/csi-hostpathplugin-8q98t" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.055957 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0a9e9dc5-0fe4-493d-9d50-c92c3e7a983c-metrics-tls\") pod \"dns-default-9svkn\" (UID: \"0a9e9dc5-0fe4-493d-9d50-c92c3e7a983c\") " pod="openshift-dns/dns-default-9svkn" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.055987 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/e31f1c5f-27f0-4742-a04b-91544011bdad-csi-data-dir\") pod \"csi-hostpathplugin-8q98t\" (UID: \"e31f1c5f-27f0-4742-a04b-91544011bdad\") " pod="hostpath-provisioner/csi-hostpathplugin-8q98t" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.056031 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/e31f1c5f-27f0-4742-a04b-91544011bdad-plugins-dir\") pod \"csi-hostpathplugin-8q98t\" (UID: \"e31f1c5f-27f0-4742-a04b-91544011bdad\") " pod="hostpath-provisioner/csi-hostpathplugin-8q98t" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.056055 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0a9e9dc5-0fe4-493d-9d50-c92c3e7a983c-config-volume\") pod \"dns-default-9svkn\" (UID: \"0a9e9dc5-0fe4-493d-9d50-c92c3e7a983c\") " pod="openshift-dns/dns-default-9svkn" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.056077 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgcpq\" (UniqueName: \"kubernetes.io/projected/0a9e9dc5-0fe4-493d-9d50-c92c3e7a983c-kube-api-access-xgcpq\") pod \"dns-default-9svkn\" (UID: 
\"0a9e9dc5-0fe4-493d-9d50-c92c3e7a983c\") " pod="openshift-dns/dns-default-9svkn" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.056112 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/e31f1c5f-27f0-4742-a04b-91544011bdad-mountpoint-dir\") pod \"csi-hostpathplugin-8q98t\" (UID: \"e31f1c5f-27f0-4742-a04b-91544011bdad\") " pod="hostpath-provisioner/csi-hostpathplugin-8q98t" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.056203 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/e31f1c5f-27f0-4742-a04b-91544011bdad-mountpoint-dir\") pod \"csi-hostpathplugin-8q98t\" (UID: \"e31f1c5f-27f0-4742-a04b-91544011bdad\") " pod="hostpath-provisioner/csi-hostpathplugin-8q98t" Jan 22 14:21:20 crc kubenswrapper[4948]: E0122 14:21:20.056659 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:20.556641822 +0000 UTC m=+138.886314751 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.057482 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/e31f1c5f-27f0-4742-a04b-91544011bdad-socket-dir\") pod \"csi-hostpathplugin-8q98t\" (UID: \"e31f1c5f-27f0-4742-a04b-91544011bdad\") " pod="hostpath-provisioner/csi-hostpathplugin-8q98t" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.057665 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/e31f1c5f-27f0-4742-a04b-91544011bdad-registration-dir\") pod \"csi-hostpathplugin-8q98t\" (UID: \"e31f1c5f-27f0-4742-a04b-91544011bdad\") " pod="hostpath-provisioner/csi-hostpathplugin-8q98t" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.058506 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/e31f1c5f-27f0-4742-a04b-91544011bdad-csi-data-dir\") pod \"csi-hostpathplugin-8q98t\" (UID: \"e31f1c5f-27f0-4742-a04b-91544011bdad\") " pod="hostpath-provisioner/csi-hostpathplugin-8q98t" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.058635 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/e31f1c5f-27f0-4742-a04b-91544011bdad-plugins-dir\") pod \"csi-hostpathplugin-8q98t\" (UID: \"e31f1c5f-27f0-4742-a04b-91544011bdad\") " pod="hostpath-provisioner/csi-hostpathplugin-8q98t" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.058924 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/0a9e9dc5-0fe4-493d-9d50-c92c3e7a983c-config-volume\") pod \"dns-default-9svkn\" (UID: \"0a9e9dc5-0fe4-493d-9d50-c92c3e7a983c\") " 
pod="openshift-dns/dns-default-9svkn" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.062413 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/0a9e9dc5-0fe4-493d-9d50-c92c3e7a983c-metrics-tls\") pod \"dns-default-9svkn\" (UID: \"0a9e9dc5-0fe4-493d-9d50-c92c3e7a983c\") " pod="openshift-dns/dns-default-9svkn" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.068290 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ncpfm\" (UniqueName: \"kubernetes.io/projected/8a187fdb-fd89-404c-b194-00ef10ec1a01-kube-api-access-ncpfm\") pod \"service-ca-operator-777779d784-g5zjf\" (UID: \"8a187fdb-fd89-404c-b194-00ef10ec1a01\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-g5zjf" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.083657 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bw4sw\" (UniqueName: \"kubernetes.io/projected/5defb8aa-bd1f-43d9-a86d-0026afa4f8d1-kube-api-access-bw4sw\") pod \"console-f9d7485db-7kdkq\" (UID: \"5defb8aa-bd1f-43d9-a86d-0026afa4f8d1\") " pod="openshift-console/console-f9d7485db-7kdkq" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.085263 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-28tff" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.104597 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-bound-sa-token\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.128381 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbnp8\" (UniqueName: \"kubernetes.io/projected/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-kube-api-access-qbnp8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.149061 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sw4mq\" (UniqueName: \"kubernetes.io/projected/3b860e63-df81-4b66-a052-835876d6cb58-kube-api-access-sw4mq\") pod \"cluster-image-registry-operator-dc59b4c8b-6bb9s\" (UID: \"3b860e63-df81-4b66-a052-835876d6cb58\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6bb9s" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.158142 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:20 crc kubenswrapper[4948]: E0122 14:21:20.158266 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:20.658230093 +0000 UTC m=+138.987903022 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.158494 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:20 crc kubenswrapper[4948]: E0122 14:21:20.159046 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:20.659038295 +0000 UTC m=+138.988711224 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.169444 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/627c131b-4a05-4674-8889-fba91fd90ec7-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-f4955\" (UID: \"627c131b-4a05-4674-8889-fba91fd90ec7\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f4955" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.184435 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdbdb\" (UniqueName: \"kubernetes.io/projected/859c9f3a-e3af-45a7-bc7c-ea5838c24a4b-kube-api-access-bdbdb\") pod \"service-ca-9c57cc56f-qjjhb\" (UID: \"859c9f3a-e3af-45a7-bc7c-ea5838c24a4b\") " pod="openshift-service-ca/service-ca-9c57cc56f-qjjhb" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.204324 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/ad723228-be0e-414d-a738-9ae3a916bef7-bound-sa-token\") pod \"ingress-operator-5b745b69d9-lt5mv\" (UID: \"ad723228-be0e-414d-a738-9ae3a916bef7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lt5mv" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.224905 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7x6h\" (UniqueName: \"kubernetes.io/projected/cfbebb9e-7111-4271-bff9-e663d96e586e-kube-api-access-l7x6h\") pod \"router-default-5444994796-xgx8j\" (UID: \"cfbebb9e-7111-4271-bff9-e663d96e586e\") " pod="openshift-ingress/router-default-5444994796-xgx8j" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.246671 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lkz2x\" 
(UniqueName: \"kubernetes.io/projected/12fa60a3-783c-4015-b759-ab636fd6e559-kube-api-access-lkz2x\") pod \"marketplace-operator-79b997595-7cjtc\" (UID: \"12fa60a3-783c-4015-b759-ab636fd6e559\") " pod="openshift-marketplace/marketplace-operator-79b997595-7cjtc" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.259660 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:20 crc kubenswrapper[4948]: E0122 14:21:20.259852 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:20.759821614 +0000 UTC m=+139.089494553 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.260196 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:20 crc kubenswrapper[4948]: E0122 14:21:20.260777 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:20.760747109 +0000 UTC m=+139.090420078 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.265255 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r4772\" (UniqueName: \"kubernetes.io/projected/90f993a5-e187-4e11-9295-fba46ad3f759-kube-api-access-r4772\") pod \"ingress-canary-t6lsv\" (UID: \"90f993a5-e187-4e11-9295-fba46ad3f759\") " pod="openshift-ingress-canary/ingress-canary-t6lsv" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.283370 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qhh5h\" (UniqueName: \"kubernetes.io/projected/ad723228-be0e-414d-a738-9ae3a916bef7-kube-api-access-qhh5h\") pod \"ingress-operator-5b745b69d9-lt5mv\" (UID: \"ad723228-be0e-414d-a738-9ae3a916bef7\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lt5mv" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.296218 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6bb9s" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.303439 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lt5mv" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.319312 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-qjjhb" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.325053 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pbmdc\" (UniqueName: \"kubernetes.io/projected/e31f1c5f-27f0-4742-a04b-91544011bdad-kube-api-access-pbmdc\") pod \"csi-hostpathplugin-8q98t\" (UID: \"e31f1c5f-27f0-4742-a04b-91544011bdad\") " pod="hostpath-provisioner/csi-hostpathplugin-8q98t" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.332431 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh"] Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.332699 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-g5zjf" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.346993 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f4955" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.360100 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qxj2h"] Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.361914 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:20 crc kubenswrapper[4948]: E0122 14:21:20.362095 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:20.862072333 +0000 UTC m=+139.191745262 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.362382 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.362397 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sv2zr"] Jan 22 14:21:20 crc kubenswrapper[4948]: E0122 14:21:20.362713 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:20.8627064 +0000 UTC m=+139.192379329 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.364433 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-6whzf"] Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.366951 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-7cjtc" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.375509 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-xgx8j" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.391270 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-t6lsv" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.413873 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-8q98t" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.463914 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:20 crc kubenswrapper[4948]: E0122 14:21:20.464105 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:20.964078176 +0000 UTC m=+139.293751115 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.464353 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:20 crc kubenswrapper[4948]: E0122 14:21:20.464762 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:20.964754294 +0000 UTC m=+139.294427213 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.566042 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:20 crc kubenswrapper[4948]: E0122 14:21:20.566637 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:21.066616693 +0000 UTC m=+139.396289632 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.670356 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:20 crc kubenswrapper[4948]: E0122 14:21:20.670892 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:21.170875656 +0000 UTC m=+139.500548595 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.724448 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ps89b" event={"ID":"1c262802-72e7-4bc1-92bd-bf8b5551cade","Type":"ContainerStarted","Data":"c751129a6da4407b4601fcff053974ee4fd391850107c1a21b14d5c09cf39eba"} Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.725134 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pz6k6" event={"ID":"7ba1d16e-01d0-42f5-b3ef-9196a8a11197","Type":"ContainerStarted","Data":"fcbb131f7ec26a2a8f5265fcd9173a7cebbc9d955ff88a24efbdf5fd73ce9c49"} Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.726551 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x5qwp" event={"ID":"6f9164f3-da46-447e-94fd-ffda5fb2f565","Type":"ContainerStarted","Data":"eeb58a51ed52ea44985f1c5962b158643876c77e0decc59f6683a14c5a11f43d"} Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.727716 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-974mb" event={"ID":"78f96e55-8d97-4d74-8a95-44356bb1c0b1","Type":"ContainerStarted","Data":"20230eb4c969a65235fff1d2eee0115b4ced0e39cbfc1a231ba4a1fbb160a4b7"} Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.771183 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:20 crc kubenswrapper[4948]: E0122 14:21:20.771289 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:21.271264584 +0000 UTC m=+139.600937523 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.771426 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:20 crc kubenswrapper[4948]: E0122 14:21:20.771794 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:21.271784648 +0000 UTC m=+139.601457587 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.864719 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/5defb8aa-bd1f-43d9-a86d-0026afa4f8d1-service-ca\") pod \"console-f9d7485db-7kdkq\" (UID: \"5defb8aa-bd1f-43d9-a86d-0026afa4f8d1\") " pod="openshift-console/console-f9d7485db-7kdkq" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.865524 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8c9115ab-73ac-4742-96e7-041345c9c222-serving-cert\") pod \"authentication-operator-69f744f599-jwzjg\" (UID: \"8c9115ab-73ac-4742-96e7-041345c9c222\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-jwzjg" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.865641 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fec867e7-c935-489e-b5e9-70d0cf71413b-serving-cert\") pod \"route-controller-manager-6576b87f9c-6p6lm\" (UID: \"fec867e7-c935-489e-b5e9-70d0cf71413b\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.868880 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgcpq\" (UniqueName: \"kubernetes.io/projected/0a9e9dc5-0fe4-493d-9d50-c92c3e7a983c-kube-api-access-xgcpq\") pod \"dns-default-9svkn\" (UID: \"0a9e9dc5-0fe4-493d-9d50-c92c3e7a983c\") " pod="openshift-dns/dns-default-9svkn" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.872433 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:20 crc kubenswrapper[4948]: E0122 14:21:20.872891 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:21.372873426 +0000 UTC m=+139.702546365 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.885170 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-jwzjg" Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.894019 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zq2mw"] Jan 22 14:21:20 crc kubenswrapper[4948]: W0122 14:21:20.926752 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod336a185f_a030_45f8_93f9_be33304506e1.slice/crio-9f8e541da53da43cabc6fa2ea0621734c8dfa2ae4a4b7383faec78a5f11e6ec5 WatchSource:0}: Error finding container 9f8e541da53da43cabc6fa2ea0621734c8dfa2ae4a4b7383faec78a5f11e6ec5: Status 404 returned error can't find the container with id 9f8e541da53da43cabc6fa2ea0621734c8dfa2ae4a4b7383faec78a5f11e6ec5 Jan 22 14:21:20 crc kubenswrapper[4948]: W0122 14:21:20.929234 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1cb99e0f_ec79_4e50_9284_f161cfe70a93.slice/crio-5e1967c7ecadf3543c715b324ec221a80542cc17e7d873979f8ef4c0ad1cbcc5 WatchSource:0}: Error finding container 5e1967c7ecadf3543c715b324ec221a80542cc17e7d873979f8ef4c0ad1cbcc5: Status 404 returned error can't find the container with id 5e1967c7ecadf3543c715b324ec221a80542cc17e7d873979f8ef4c0ad1cbcc5 Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.939802 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm" Jan 22 14:21:20 crc kubenswrapper[4948]: W0122 14:21:20.945388 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda316cc87_51ce_49fd_b81c_fab7d253dea6.slice/crio-8c960bb82c7cff619f8118bfd32e8f9a69a0cf6ec3a4f455972c0047d65c3fce WatchSource:0}: Error finding container 8c960bb82c7cff619f8118bfd32e8f9a69a0cf6ec3a4f455972c0047d65c3fce: Status 404 returned error can't find the container with id 8c960bb82c7cff619f8118bfd32e8f9a69a0cf6ec3a4f455972c0047d65c3fce Jan 22 14:21:20 crc kubenswrapper[4948]: W0122 14:21:20.947193 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0a5509fc_2393_4932_be1c_6b9379065100.slice/crio-57c742a19e0a7d88dd84d14bf4f3b98619ddd72a9692be4ca3a1b83a627ca4b5 WatchSource:0}: Error finding container 57c742a19e0a7d88dd84d14bf4f3b98619ddd72a9692be4ca3a1b83a627ca4b5: Status 404 returned error can't find the container with id 57c742a19e0a7d88dd84d14bf4f3b98619ddd72a9692be4ca3a1b83a627ca4b5 Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.960241 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-7kdkq" Jan 22 14:21:20 crc kubenswrapper[4948]: W0122 14:21:20.969135 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6d971c19_f94e_4e30_8cf4_ae0fb8be0ad6.slice/crio-48ab9e553ceeb06e5cf1a69c4535072ac97e1e436904cfb949fe585162cd3308 WatchSource:0}: Error finding container 48ab9e553ceeb06e5cf1a69c4535072ac97e1e436904cfb949fe585162cd3308: Status 404 returned error can't find the container with id 48ab9e553ceeb06e5cf1a69c4535072ac97e1e436904cfb949fe585162cd3308 Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.973928 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:20 crc kubenswrapper[4948]: E0122 14:21:20.974657 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:21.474632301 +0000 UTC m=+139.804305410 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:20 crc kubenswrapper[4948]: I0122 14:21:20.999619 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-9svkn" Jan 22 14:21:21 crc kubenswrapper[4948]: I0122 14:21:21.074991 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:21 crc kubenswrapper[4948]: E0122 14:21:21.075339 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:21.575300367 +0000 UTC m=+139.904973296 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:21 crc kubenswrapper[4948]: I0122 14:21:21.077552 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:21 crc kubenswrapper[4948]: E0122 14:21:21.078474 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:21.578456463 +0000 UTC m=+139.908129392 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:21 crc kubenswrapper[4948]: I0122 14:21:21.183446 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:21 crc kubenswrapper[4948]: E0122 14:21:21.185642 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:21.685480272 +0000 UTC m=+140.015153201 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:21 crc kubenswrapper[4948]: I0122 14:21:21.186705 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:21 crc kubenswrapper[4948]: E0122 14:21:21.187358 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:21.687338782 +0000 UTC m=+140.017011711 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:21 crc kubenswrapper[4948]: I0122 14:21:21.293909 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:21 crc kubenswrapper[4948]: E0122 14:21:21.294184 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:21.794162325 +0000 UTC m=+140.123835254 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:21 crc kubenswrapper[4948]: I0122 14:21:21.297727 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:21 crc kubenswrapper[4948]: E0122 14:21:21.298159 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:21.798138174 +0000 UTC m=+140.127811103 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:21 crc kubenswrapper[4948]: I0122 14:21:21.312678 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fkltl"] Jan 22 14:21:21 crc kubenswrapper[4948]: I0122 14:21:21.334808 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6bb9s"] Jan 22 14:21:21 crc kubenswrapper[4948]: I0122 14:21:21.394190 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-g5zjf"] Jan 22 14:21:21 crc kubenswrapper[4948]: I0122 14:21:21.400866 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:21 crc kubenswrapper[4948]: E0122 14:21:21.401327 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:21.901303858 +0000 UTC m=+140.230976787 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:21 crc kubenswrapper[4948]: I0122 14:21:21.447043 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6j7zl"] Jan 22 14:21:21 crc kubenswrapper[4948]: I0122 14:21:21.503746 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:21 crc kubenswrapper[4948]: E0122 14:21:21.504792 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:22.004743529 +0000 UTC m=+140.334416458 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:21 crc kubenswrapper[4948]: W0122 14:21:21.531246 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod85276719_eead_4d75_9df7_219df59cfd40.slice/crio-19fa86ee0e03de13e7c544a0d9f92b2d45fd73c436d35907c4c56d0d4ebcae09 WatchSource:0}: Error finding container 19fa86ee0e03de13e7c544a0d9f92b2d45fd73c436d35907c4c56d0d4ebcae09: Status 404 returned error can't find the container with id 19fa86ee0e03de13e7c544a0d9f92b2d45fd73c436d35907c4c56d0d4ebcae09 Jan 22 14:21:21 crc kubenswrapper[4948]: I0122 14:21:21.605456 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:21 crc kubenswrapper[4948]: E0122 14:21:21.605943 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:22.105921439 +0000 UTC m=+140.435594368 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:21 crc kubenswrapper[4948]: I0122 14:21:21.668682 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f4955"] Jan 22 14:21:21 crc kubenswrapper[4948]: I0122 14:21:21.709844 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:21 crc kubenswrapper[4948]: E0122 14:21:21.710213 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:22.210196524 +0000 UTC m=+140.539869453 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:21 crc kubenswrapper[4948]: W0122 14:21:21.714096 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8a187fdb_fd89_404c_b194_00ef10ec1a01.slice/crio-eddd6a43fe8fa0e24c99910e1ca99a956ae88d7aa061b658377d0d4aa1c30362 WatchSource:0}: Error finding container eddd6a43fe8fa0e24c99910e1ca99a956ae88d7aa061b658377d0d4aa1c30362: Status 404 returned error can't find the container with id eddd6a43fe8fa0e24c99910e1ca99a956ae88d7aa061b658377d0d4aa1c30362 Jan 22 14:21:21 crc kubenswrapper[4948]: W0122 14:21:21.717302 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod627c131b_4a05_4674_8889_fba91fd90ec7.slice/crio-738b236827f03256f296878689c4d0997d3b06bb119eddc130ea841eb94d40f9 WatchSource:0}: Error finding container 738b236827f03256f296878689c4d0997d3b06bb119eddc130ea841eb94d40f9: Status 404 returned error can't find the container with id 738b236827f03256f296878689c4d0997d3b06bb119eddc130ea841eb94d40f9 Jan 22 14:21:21 crc kubenswrapper[4948]: I0122 14:21:21.760133 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h9sz7"] Jan 22 14:21:21 crc kubenswrapper[4948]: I0122 14:21:21.770069 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-g5zjf" event={"ID":"8a187fdb-fd89-404c-b194-00ef10ec1a01","Type":"ContainerStarted","Data":"eddd6a43fe8fa0e24c99910e1ca99a956ae88d7aa061b658377d0d4aa1c30362"} Jan 
22 14:21:21 crc kubenswrapper[4948]: I0122 14:21:21.787083 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" event={"ID":"1cb99e0f-ec79-4e50-9284-f161cfe70a93","Type":"ContainerStarted","Data":"5e1967c7ecadf3543c715b324ec221a80542cc17e7d873979f8ef4c0ad1cbcc5"} Jan 22 14:21:21 crc kubenswrapper[4948]: I0122 14:21:21.810574 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:21 crc kubenswrapper[4948]: E0122 14:21:21.811998 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:22.311876616 +0000 UTC m=+140.641549545 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:21 crc kubenswrapper[4948]: I0122 14:21:21.820245 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5jkws"] Jan 22 14:21:21 crc kubenswrapper[4948]: I0122 14:21:21.850285 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zx2x2" event={"ID":"fcb187b2-3022-41a1-a9da-0cb254b18c16","Type":"ContainerStarted","Data":"b098047655a5b3155174ba73f625cc35ca746f59e397b106d2b9c35a3c4c78c9"} Jan 22 14:21:21 crc kubenswrapper[4948]: I0122 14:21:21.850964 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7cjtc"] Jan 22 14:21:21 crc kubenswrapper[4948]: I0122 14:21:21.880596 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-cjtzs"] Jan 22 14:21:21 crc kubenswrapper[4948]: I0122 14:21:21.918138 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:21 crc kubenswrapper[4948]: E0122 14:21:21.918430 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:22.418419262 +0000 UTC m=+140.748092191 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:21 crc kubenswrapper[4948]: I0122 14:21:21.930735 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x5qwp" event={"ID":"6f9164f3-da46-447e-94fd-ffda5fb2f565","Type":"ContainerStarted","Data":"d9cdfa171a63c36a994b723dd1861d1d1d7bb7cb2483fe357883f3c96987111f"} Jan 22 14:21:21 crc kubenswrapper[4948]: I0122 14:21:21.941589 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-qjjhb"] Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.005485 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qxj2h" event={"ID":"336a185f-a030-45f8-93f9-be33304506e1","Type":"ContainerStarted","Data":"9f8e541da53da43cabc6fa2ea0621734c8dfa2ae4a4b7383faec78a5f11e6ec5"} Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.005919 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zfs47" event={"ID":"1a92e7d6-836a-4227-9ee8-3db4353fdfa1","Type":"ContainerStarted","Data":"37d96e37e0a0e4fccb119a1b76765a1cd65524bfe5c54946cb42064dcc5f2674"} Jan 22 14:21:22 crc kubenswrapper[4948]: W0122 14:21:22.017657 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podca154693_ba02_4d7b_a4b6_45fee35a0280.slice/crio-9cac8e2577df4e99c5a092c18cfbb4ca2c4f4f9c41f8e1b5b25d426c8431dc27 WatchSource:0}: Error finding container 9cac8e2577df4e99c5a092c18cfbb4ca2c4f4f9c41f8e1b5b25d426c8431dc27: Status 404 returned error can't find the container with id 9cac8e2577df4e99c5a092c18cfbb4ca2c4f4f9c41f8e1b5b25d426c8431dc27 Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.018294 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-xgx8j" event={"ID":"cfbebb9e-7111-4271-bff9-e663d96e586e","Type":"ContainerStarted","Data":"a1f4025595d054f54a7ab50d45be9bf77a4afb1b9d4f18d2be2fb4df8d914446"} Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.019431 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:22 crc kubenswrapper[4948]: E0122 14:21:22.019763 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:22.519748297 +0000 UTC m=+140.849421226 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.066427 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l9pc7" event={"ID":"ce5de36e-7ed5-42bb-bf9d-28bf32c76edf","Type":"ContainerStarted","Data":"d31d981e3d54203e5dd643591038d58a4bcaaaf12ad33cf2a13947f236ad12be"} Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.128959 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:22 crc kubenswrapper[4948]: E0122 14:21:22.129646 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:22.629630023 +0000 UTC m=+140.959302962 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.179043 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-ln6zk"] Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.200447 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-6whzf" event={"ID":"a316cc87-51ce-49fd-b81c-fab7d253dea6","Type":"ContainerStarted","Data":"8c960bb82c7cff619f8118bfd32e8f9a69a0cf6ec3a4f455972c0047d65c3fce"} Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.210322 4948 generic.go:334] "Generic (PLEG): container finished" podID="78f96e55-8d97-4d74-8a95-44356bb1c0b1" containerID="a2768b26b0d16b28e5ae9a9a152a0de37591d8080e08d1b0ff8a93b7f645f4ef" exitCode=0 Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.210758 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-974mb" event={"ID":"78f96e55-8d97-4d74-8a95-44356bb1c0b1","Type":"ContainerDied","Data":"a2768b26b0d16b28e5ae9a9a152a0de37591d8080e08d1b0ff8a93b7f645f4ef"} Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.228701 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-52bdf" event={"ID":"3722b0ae-dab1-4301-b98e-de27b27ab754","Type":"ContainerStarted","Data":"9f2b91317b674fcf0e91ce0236e63eb1cae4c010dc197082f27eee58d69f56fe"} Jan 22 14:21:22 crc 
kubenswrapper[4948]: I0122 14:21:22.232655 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:22 crc kubenswrapper[4948]: E0122 14:21:22.233106 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:22.733090385 +0000 UTC m=+141.062763314 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.249938 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-4mvfm" event={"ID":"bd18b7ad-276e-4008-85c7-e4680c574d27","Type":"ContainerStarted","Data":"2dedb951bc6df8aba463dec03c0000ccd3e7f076478a9992f0439863cfa3c7d2"} Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.249983 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-4mvfm" Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.264728 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9f5vd" event={"ID":"d6d99019-a94b-4736-b5f4-496d973e0bc8","Type":"ContainerStarted","Data":"49e3999c14500f7395db8b2a20593dc3e48ffc8dcc6e15f666739ede206a16b9"} Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.273638 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-lt5mv"] Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.279491 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-4mvfm" Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.290752 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-qxjql" event={"ID":"deeb21d3-29ae-4db0-a982-461ee3d6cd4b","Type":"ContainerStarted","Data":"3d48e0c4e31565c3cbc134db84a81fe7fd389268d093e30004c96ea7e9e4a7f8"} Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.294808 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sv2zr" event={"ID":"0a5509fc-2393-4932-be1c-6b9379065100","Type":"ContainerStarted","Data":"57c742a19e0a7d88dd84d14bf4f3b98619ddd72a9692be4ca3a1b83a627ca4b5"} Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.303078 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-28tff" event={"ID":"85276719-eead-4d75-9df7-219df59cfd40","Type":"ContainerStarted","Data":"19fa86ee0e03de13e7c544a0d9f92b2d45fd73c436d35907c4c56d0d4ebcae09"} Jan 22 14:21:22 crc 
kubenswrapper[4948]: I0122 14:21:22.334112 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:22 crc kubenswrapper[4948]: E0122 14:21:22.334419 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:22.834404938 +0000 UTC m=+141.164077867 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.334657 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-4mvfm" podStartSLOduration=121.334636495 podStartE2EDuration="2m1.334636495s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:22.289538168 +0000 UTC m=+140.619211097" watchObservedRunningTime="2026-01-22 14:21:22.334636495 +0000 UTC m=+140.664309424" Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.357700 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-s4jms"] Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.368547 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" event={"ID":"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6","Type":"ContainerStarted","Data":"48ab9e553ceeb06e5cf1a69c4535072ac97e1e436904cfb949fe585162cd3308"} Jan 22 14:21:22 crc kubenswrapper[4948]: W0122 14:21:22.377721 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6187e924_4baa_4a90_a669_891278f2e23d.slice/crio-cd3e9e3466d1960fcaaf14733c84b9e9b906d85921bfc5c480a9541deb57b942 WatchSource:0}: Error finding container cd3e9e3466d1960fcaaf14733c84b9e9b906d85921bfc5c480a9541deb57b942: Status 404 returned error can't find the container with id cd3e9e3466d1960fcaaf14733c84b9e9b906d85921bfc5c480a9541deb57b942 Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.434812 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-8q98t"] Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.435977 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:22 crc kubenswrapper[4948]: E0122 14:21:22.437453 4948 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:22.937435918 +0000 UTC m=+141.267108847 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.455498 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-52bdf" podStartSLOduration=121.455460528 podStartE2EDuration="2m1.455460528s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:22.379207376 +0000 UTC m=+140.708880305" watchObservedRunningTime="2026-01-22 14:21:22.455460528 +0000 UTC m=+140.785133457" Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.484740 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-xf6qs" event={"ID":"2314b196-d44a-487b-b9db-e0396b172957","Type":"ContainerStarted","Data":"d4ab31731798ac231415ef6ead201ed1f50228fc5e57f0857a7935d4a153c0b5"} Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.513265 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pz6k6" event={"ID":"7ba1d16e-01d0-42f5-b3ef-9196a8a11197","Type":"ContainerStarted","Data":"45498888db58c1168f266eeed763317953b5b0c04adf452fa476410bd9c6e86a"} Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.514518 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9f5vd" podStartSLOduration=121.514494533 podStartE2EDuration="2m1.514494533s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:22.509537218 +0000 UTC m=+140.839210147" watchObservedRunningTime="2026-01-22 14:21:22.514494533 +0000 UTC m=+140.844167462" Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.540609 4948 generic.go:334] "Generic (PLEG): container finished" podID="b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0" containerID="549f0447c6d74fc9b7f1b1c88f56d8a84a50b98a06bd659e09f6bc1c0f384464" exitCode=0 Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.542924 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" event={"ID":"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0","Type":"ContainerDied","Data":"549f0447c6d74fc9b7f1b1c88f56d8a84a50b98a06bd659e09f6bc1c0f384464"} Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.543727 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:22 crc kubenswrapper[4948]: E0122 14:21:22.543987 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:23.043975694 +0000 UTC m=+141.373648623 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:22 crc kubenswrapper[4948]: W0122 14:21:22.559916 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podad723228_be0e_414d_a738_9ae3a916bef7.slice/crio-a90379b862d885c4c32c4f82c604e5fc4c463535d613fb1635f1e556fefba83e WatchSource:0}: Error finding container a90379b862d885c4c32c4f82c604e5fc4c463535d613fb1635f1e556fefba83e: Status 404 returned error can't find the container with id a90379b862d885c4c32c4f82c604e5fc4c463535d613fb1635f1e556fefba83e Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.563628 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6bb9s" event={"ID":"3b860e63-df81-4b66-a052-835876d6cb58","Type":"ContainerStarted","Data":"8b9c44bba5f6f79d74879e343d0e29ebf90f35dc54baf4018318baf53ef79c89"} Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.566313 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-bbmbn" event={"ID":"f2c0d1d9-76f0-45bb-8a15-88a0876d5b76","Type":"ContainerStarted","Data":"0f95383adbaa2affe9eb294e32a88f5b00c39f7b758f7ec6fd299adcc8466110"} Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.604100 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-7kdkq"] Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.614657 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-jwzjg"] Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.650643 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:22 crc kubenswrapper[4948]: E0122 14:21:22.651746 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:23.151729543 +0000 UTC m=+141.481402472 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.671503 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm"] Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.671583 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkbsc"] Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.671485 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-xf6qs" podStartSLOduration=121.671465409 podStartE2EDuration="2m1.671465409s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:22.652958686 +0000 UTC m=+140.982631615" watchObservedRunningTime="2026-01-22 14:21:22.671465409 +0000 UTC m=+141.001138338" Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.706394 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-t6lsv"] Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.708617 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-pz6k6" podStartSLOduration=121.708586498 podStartE2EDuration="2m1.708586498s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:22.695739738 +0000 UTC m=+141.025412667" watchObservedRunningTime="2026-01-22 14:21:22.708586498 +0000 UTC m=+141.038259427" Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.753840 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:22 crc kubenswrapper[4948]: E0122 14:21:22.754224 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:23.254209398 +0000 UTC m=+141.583882327 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.763818 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-9svkn"] Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.854512 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:22 crc kubenswrapper[4948]: E0122 14:21:22.854944 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:23.354912925 +0000 UTC m=+141.684585854 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.855023 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:22 crc kubenswrapper[4948]: E0122 14:21:22.855405 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:23.355397909 +0000 UTC m=+141.685070838 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:22 crc kubenswrapper[4948]: I0122 14:21:22.961672 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:22 crc kubenswrapper[4948]: E0122 14:21:22.962154 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:23.462128089 +0000 UTC m=+141.791801018 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.063359 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:23 crc kubenswrapper[4948]: E0122 14:21:23.063799 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:23.563781081 +0000 UTC m=+141.893454010 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.165155 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:23 crc kubenswrapper[4948]: E0122 14:21:23.165328 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:23.665300361 +0000 UTC m=+141.994973300 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.165561 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:23 crc kubenswrapper[4948]: E0122 14:21:23.165976 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:23.665959738 +0000 UTC m=+141.995632667 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.268445 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:23 crc kubenswrapper[4948]: E0122 14:21:23.268718 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:23.768691391 +0000 UTC m=+142.098364320 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.269110 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:23 crc kubenswrapper[4948]: E0122 14:21:23.269435 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:23.76942317 +0000 UTC m=+142.099096099 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.370811 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:23 crc kubenswrapper[4948]: E0122 14:21:23.371273 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:23.871247688 +0000 UTC m=+142.200920617 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.475493 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:23 crc kubenswrapper[4948]: E0122 14:21:23.476183 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:23.97615901 +0000 UTC m=+142.305831939 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.577513 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:23 crc kubenswrapper[4948]: E0122 14:21:23.578064 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:24.078047409 +0000 UTC m=+142.407720338 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.591441 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkbsc" event={"ID":"4c1d05e6-fc11-4fb5-bf46-c346ded55ea8","Type":"ContainerStarted","Data":"7f1f7ee8948f0e7abdd5df146cf2c04a63c14f5b02021abf592fda020b3839cb"} Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.602581 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sv2zr" event={"ID":"0a5509fc-2393-4932-be1c-6b9379065100","Type":"ContainerStarted","Data":"7d29c6ea62956a4df3753cdb437dc86a70795de28747960449c33cee1430caee"} Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.623772 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zx2x2" event={"ID":"fcb187b2-3022-41a1-a9da-0cb254b18c16","Type":"ContainerStarted","Data":"a7f5cb02cf68173f977cb14ee614f132abcb0bf20b6be051fe54c0d89baa9a45"} Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.624447 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zx2x2" Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.634057 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l9pc7" event={"ID":"ce5de36e-7ed5-42bb-bf9d-28bf32c76edf","Type":"ContainerStarted","Data":"cea2b0c502cc554a6d65a1a85d0e2cc152480d0d8fa5d5716c3e65e4a6b57eed"} Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.634520 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-sv2zr" podStartSLOduration=122.634503453 podStartE2EDuration="2m2.634503453s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:23.632314893 +0000 UTC m=+141.961987822" watchObservedRunningTime="2026-01-22 14:21:23.634503453 +0000 UTC m=+141.964176372" Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.641853 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zx2x2" Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.646522 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm" event={"ID":"fec867e7-c935-489e-b5e9-70d0cf71413b","Type":"ContainerStarted","Data":"1f8b9e6d31ab63e0d00ff4afff589ff2580065e193bf8787cec802394c72f2e6"} Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.652454 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-g5zjf" event={"ID":"8a187fdb-fd89-404c-b194-00ef10ec1a01","Type":"ContainerStarted","Data":"645301b8997511d51c9980d628a9d306eaa95bb2ec0ab423fbc2c79d111ba5c0"} Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.655542 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6bb9s" event={"ID":"3b860e63-df81-4b66-a052-835876d6cb58","Type":"ContainerStarted","Data":"9e167e7e01ed005ca58ce8f2ec572e1e167f8f65b5260b642408a8cb66446895"} Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.665253 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qxj2h" event={"ID":"336a185f-a030-45f8-93f9-be33304506e1","Type":"ContainerStarted","Data":"83b7b147dda71c2b98c7ccbfb8974b5cda7de924f95f55f00915d2cbefe4b4e7"} Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.667675 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-zx2x2" podStartSLOduration=122.667664295 podStartE2EDuration="2m2.667664295s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:23.666582335 +0000 UTC m=+141.996255264" watchObservedRunningTime="2026-01-22 14:21:23.667664295 +0000 UTC m=+141.997337224" Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.678760 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:23 crc kubenswrapper[4948]: E0122 14:21:23.683312 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:24.183290769 +0000 UTC m=+142.512963698 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.698259 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-8q98t" event={"ID":"e31f1c5f-27f0-4742-a04b-91544011bdad","Type":"ContainerStarted","Data":"668027e2284cfba7850e55344921784aacd90694006824f4934121d11c8ed6b2"} Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.730386 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5jkws" event={"ID":"e1523dba-d641-467b-96eb-521434ddfa75","Type":"ContainerStarted","Data":"3863ad77283c0691900e32433ad3a4a0d012f3b2f800ff51722aed870cb418c5"} Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.730428 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5jkws" event={"ID":"e1523dba-d641-467b-96eb-521434ddfa75","Type":"ContainerStarted","Data":"5bc11471bdc1001362f4166a00380a438222d995405079c5dd7b59b4bd4c227a"} Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.731245 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5jkws" Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.747665 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-g5zjf" podStartSLOduration=121.747647338 podStartE2EDuration="2m1.747647338s" podCreationTimestamp="2026-01-22 14:19:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:23.745677004 +0000 UTC m=+142.075349933" watchObservedRunningTime="2026-01-22 14:21:23.747647338 +0000 UTC m=+142.077320267" Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.770960 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fkltl" event={"ID":"66ee3af7-a820-42da-8190-e84882cee1d4","Type":"ContainerStarted","Data":"76e2ff5a7641d5a5608066f238e68d10a119c906838ebfcf6e2cbf4e4d3b83f7"} Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.771038 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fkltl" event={"ID":"66ee3af7-a820-42da-8190-e84882cee1d4","Type":"ContainerStarted","Data":"8680570e1fb30a8570ce139145d92691879e1f6579e85fb5aa15619160f56b05"} Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.772999 4948 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-5jkws container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.20:8443/healthz\": dial tcp 10.217.0.20:8443: connect: connection refused" start-of-body= Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.773149 4948 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5jkws" 
podUID="e1523dba-d641-467b-96eb-521434ddfa75" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.20:8443/healthz\": dial tcp 10.217.0.20:8443: connect: connection refused" Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.788391 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:23 crc kubenswrapper[4948]: E0122 14:21:23.792244 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:24.29221995 +0000 UTC m=+142.621892879 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.838950 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-6bb9s" podStartSLOduration=122.83893281900001 podStartE2EDuration="2m2.838932819s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:23.836150374 +0000 UTC m=+142.165823313" watchObservedRunningTime="2026-01-22 14:21:23.838932819 +0000 UTC m=+142.168605748" Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.852538 4948 generic.go:334] "Generic (PLEG): container finished" podID="1cb99e0f-ec79-4e50-9284-f161cfe70a93" containerID="22d18d66308f31ee300ad7365c1d562475bfcd22b0d67b6a4cef16b76abcd76f" exitCode=0 Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.852643 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" event={"ID":"1cb99e0f-ec79-4e50-9284-f161cfe70a93","Type":"ContainerDied","Data":"22d18d66308f31ee300ad7365c1d562475bfcd22b0d67b6a4cef16b76abcd76f"} Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.859218 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-jwzjg" event={"ID":"8c9115ab-73ac-4742-96e7-041345c9c222","Type":"ContainerStarted","Data":"4a2fdb31b383e5e0c7962401fb0b24fcadadee29e38ec34d5b024b5d9437afa4"} Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.900974 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:23 crc kubenswrapper[4948]: E0122 14:21:23.901320 4948 nestedpendingoperations.go:348] Operation 
for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:24.401306995 +0000 UTC m=+142.730979924 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.922000 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" event={"ID":"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6","Type":"ContainerStarted","Data":"fc24d6e9050886c77a04f9569e590982da2bbbddd6e021cf5a85ef331a7b6fa9"} Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.922556 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.929796 4948 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-zq2mw container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.23:6443/healthz\": dial tcp 10.217.0.23:6443: connect: connection refused" start-of-body= Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.929996 4948 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" podUID="6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.23:6443/healthz\": dial tcp 10.217.0.23:6443: connect: connection refused" Jan 22 14:21:23 crc kubenswrapper[4948]: I0122 14:21:23.972042 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qxj2h" podStartSLOduration=122.972024856 podStartE2EDuration="2m2.972024856s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:23.874394153 +0000 UTC m=+142.204067082" watchObservedRunningTime="2026-01-22 14:21:23.972024856 +0000 UTC m=+142.301697785" Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.007534 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.007676 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-974mb" event={"ID":"78f96e55-8d97-4d74-8a95-44356bb1c0b1","Type":"ContainerStarted","Data":"d969455b2a31b748a392d6add908bb83b899a9389d939ae3ef462f1e50a8b187"} Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.007713 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-974mb" Jan 22 14:21:24 crc 
kubenswrapper[4948]: E0122 14:21:24.007950 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:24.507932843 +0000 UTC m=+142.837605772 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.054624 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5jkws" podStartSLOduration=123.054610531 podStartE2EDuration="2m3.054610531s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:24.051403564 +0000 UTC m=+142.381076483" watchObservedRunningTime="2026-01-22 14:21:24.054610531 +0000 UTC m=+142.384283460" Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.078127 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-xf6qs" event={"ID":"2314b196-d44a-487b-b9db-e0396b172957","Type":"ContainerStarted","Data":"2117096d8c96d556464c0ecff73e591cc95bd4773ed7028e4445fe14c0fd5642"} Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.079037 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-xf6qs" Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.101206 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-qjjhb" event={"ID":"859c9f3a-e3af-45a7-bc7c-ea5838c24a4b","Type":"ContainerStarted","Data":"3c6d5788b32ffda6706718fd35efd47992f45e9390daf29e9d2b0351011268b0"} Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.101248 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-qjjhb" event={"ID":"859c9f3a-e3af-45a7-bc7c-ea5838c24a4b","Type":"ContainerStarted","Data":"ede9a597e664d238c28ee025dccdcf07f802c8b7d48b01d987fb2cc4f88e009e"} Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.109903 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:24 crc kubenswrapper[4948]: E0122 14:21:24.112066 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:24.612033112 +0000 UTC m=+142.941706041 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.123415 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-xf6qs" Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.145453 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-28tff" event={"ID":"85276719-eead-4d75-9df7-219df59cfd40","Type":"ContainerStarted","Data":"6880fc3480b668ed0502251fe448e1eeefa866b346101951ff7ee984d3827d54"} Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.162660 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" podStartSLOduration=123.162640257 podStartE2EDuration="2m3.162640257s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:24.161978889 +0000 UTC m=+142.491651808" watchObservedRunningTime="2026-01-22 14:21:24.162640257 +0000 UTC m=+142.492313186" Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.175813 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-7kdkq" event={"ID":"5defb8aa-bd1f-43d9-a86d-0026afa4f8d1","Type":"ContainerStarted","Data":"ee40a7acc64432030a922ff4475d3c5efe0618937c22b54e06e88327af91ad38"} Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.177154 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-s4jms" event={"ID":"a35e6789-77cc-4dd7-acc0-e4ca4ee5cdad","Type":"ContainerStarted","Data":"4e70bd08b49796d6d22d3859931d3e8a7e5a718ec69dc9c8bd6a99f60c6ed361"} Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.213511 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:24 crc kubenswrapper[4948]: E0122 14:21:24.214957 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:24.714941829 +0000 UTC m=+143.044614758 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.216843 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6j7zl" event={"ID":"33ae6fda-0f24-436c-b813-e01bb556110e","Type":"ContainerStarted","Data":"f689e7c4b2708ca132ab141bd23de4bacc9828f10af7fa7d58d948c7330e113a"} Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.216884 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6j7zl" event={"ID":"33ae6fda-0f24-436c-b813-e01bb556110e","Type":"ContainerStarted","Data":"0eb686d015c4076c3fb86fcb3a49352b1d99a48a7fffaf54dc79b9a1e7eecdfa"} Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.218303 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6j7zl" Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.238922 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ln6zk" event={"ID":"6187e924-4baa-4a90-a669-891278f2e23d","Type":"ContainerStarted","Data":"cd3e9e3466d1960fcaaf14733c84b9e9b906d85921bfc5c480a9541deb57b942"} Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.250685 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-974mb" podStartSLOduration=123.250667989 podStartE2EDuration="2m3.250667989s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:24.248138671 +0000 UTC m=+142.577811600" watchObservedRunningTime="2026-01-22 14:21:24.250667989 +0000 UTC m=+142.580340928" Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.325391 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ps89b" event={"ID":"1c262802-72e7-4bc1-92bd-bf8b5551cade","Type":"ContainerStarted","Data":"86bd0acbee9bf9def2eb44349fc9de8c7aed921ad39218e891c73a623ed5d357"} Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.325472 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ps89b" event={"ID":"1c262802-72e7-4bc1-92bd-bf8b5551cade","Type":"ContainerStarted","Data":"e7000c65eef7f0e1d44a8c87a4867c7b9089123fdd373b6f1820062be8014ffa"} Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.327877 4948 csr.go:261] certificate signing request csr-tt7q8 is approved, waiting to be issued Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.327908 4948 csr.go:257] certificate signing request csr-tt7q8 is issued Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.329680 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:24 crc kubenswrapper[4948]: E0122 14:21:24.332226 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:24.832204215 +0000 UTC m=+143.161877144 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.377210 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-qxjql" event={"ID":"deeb21d3-29ae-4db0-a982-461ee3d6cd4b","Type":"ContainerStarted","Data":"3650161a4c1baeaf0987d8b522aa34c89cba66d6f3f1ccf439c3be4a1a8906fc"} Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.392678 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6j7zl" podStartSLOduration=123.392659988 podStartE2EDuration="2m3.392659988s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:24.327667962 +0000 UTC m=+142.657340891" watchObservedRunningTime="2026-01-22 14:21:24.392659988 +0000 UTC m=+142.722332917" Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.407434 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-xgx8j" event={"ID":"cfbebb9e-7111-4271-bff9-e663d96e586e","Type":"ContainerStarted","Data":"98f44384981d5b8a28bafd1e46fb58845117eecff17501ec08930e67284468be"} Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.438000 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:24 crc kubenswrapper[4948]: E0122 14:21:24.438846 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:24.938829773 +0000 UTC m=+143.268502702 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.453658 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-6whzf" event={"ID":"a316cc87-51ce-49fd-b81c-fab7d253dea6","Type":"ContainerStarted","Data":"c4518be540fc2e170c8abde4c85d174e8d09dcd034f47b81e6e011feb91c5b99"} Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.454765 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-28tff" podStartSLOduration=7.454743716 podStartE2EDuration="7.454743716s" podCreationTimestamp="2026-01-22 14:21:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:24.453730829 +0000 UTC m=+142.783403758" watchObservedRunningTime="2026-01-22 14:21:24.454743716 +0000 UTC m=+142.784416645" Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.520387 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-9f5vd" event={"ID":"d6d99019-a94b-4736-b5f4-496d973e0bc8","Type":"ContainerStarted","Data":"73ee168ddf0bc8217927c3f9307bcf2b525457a9734f6a43d8640392edae30e0"} Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.539862 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:24 crc kubenswrapper[4948]: E0122 14:21:24.540200 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:25.040188219 +0000 UTC m=+143.369861148 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.546276 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lt5mv" event={"ID":"ad723228-be0e-414d-a738-9ae3a916bef7","Type":"ContainerStarted","Data":"a90379b862d885c4c32c4f82c604e5fc4c463535d613fb1635f1e556fefba83e"} Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.612002 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-ps89b" podStartSLOduration=123.61198406 podStartE2EDuration="2m3.61198406s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:24.597493016 +0000 UTC m=+142.927165945" watchObservedRunningTime="2026-01-22 14:21:24.61198406 +0000 UTC m=+142.941656989" Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.612700 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-qjjhb" podStartSLOduration=122.612694019 podStartE2EDuration="2m2.612694019s" podCreationTimestamp="2026-01-22 14:19:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:24.536503408 +0000 UTC m=+142.866176337" watchObservedRunningTime="2026-01-22 14:21:24.612694019 +0000 UTC m=+142.942366948" Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.642547 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:24 crc kubenswrapper[4948]: E0122 14:21:24.643110 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:25.143078344 +0000 UTC m=+143.472751273 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.649055 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h9sz7" event={"ID":"68335fcc-2687-4bf0-8a43-f4e3d3c4702e","Type":"ContainerStarted","Data":"fc2d29fa7b8c0139be7705c7e7b73b2b9417a0bf4b953a7d3c47cffbdc4d0d65"} Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.649125 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h9sz7" event={"ID":"68335fcc-2687-4bf0-8a43-f4e3d3c4702e","Type":"ContainerStarted","Data":"178dc860d968f213661364c00c3849ee7db20b034a28e757a7905cf5c475f584"} Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.709649 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-xgx8j" podStartSLOduration=123.709631094 podStartE2EDuration="2m3.709631094s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:24.657095245 +0000 UTC m=+142.986768174" watchObservedRunningTime="2026-01-22 14:21:24.709631094 +0000 UTC m=+143.039304023" Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.718407 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-qxjql" podStartSLOduration=123.718394512 podStartE2EDuration="2m3.718394512s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:24.716081678 +0000 UTC m=+143.045754607" watchObservedRunningTime="2026-01-22 14:21:24.718394512 +0000 UTC m=+143.048067441" Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.720562 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-7cjtc" event={"ID":"12fa60a3-783c-4015-b759-ab636fd6e559","Type":"ContainerStarted","Data":"d2fe002497f8f75222d955fcf4d498d4c3241d61e5300c10d72e3bcc909d2677"} Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.720609 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-7cjtc" event={"ID":"12fa60a3-783c-4015-b759-ab636fd6e559","Type":"ContainerStarted","Data":"9f2cdfb8d5e32ab42afed652f3b9322b6570a8219dd0a57ed458d8e91e8bb306"} Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.726132 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-7cjtc" Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.733358 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-9svkn" event={"ID":"0a9e9dc5-0fe4-493d-9d50-c92c3e7a983c","Type":"ContainerStarted","Data":"0ab881f9af61d792c7d26a4b64f999283b944fcf42b771e414e04cc0fd1a0742"} Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 
14:21:24.738412 4948 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-7cjtc container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" start-of-body= Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.738462 4948 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-7cjtc" podUID="12fa60a3-783c-4015-b759-ab636fd6e559" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.38:8080/healthz\": dial tcp 10.217.0.38:8080: connect: connection refused" Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.744279 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:24 crc kubenswrapper[4948]: E0122 14:21:24.745344 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:25.245331063 +0000 UTC m=+143.575003992 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.755839 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f4955" event={"ID":"627c131b-4a05-4674-8889-fba91fd90ec7","Type":"ContainerStarted","Data":"af974d721d53c01ba343a4a9f7430dc66b90df883fbc701b6a6d61ad199e812d"} Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.755897 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f4955" event={"ID":"627c131b-4a05-4674-8889-fba91fd90ec7","Type":"ContainerStarted","Data":"738b236827f03256f296878689c4d0997d3b06bb119eddc130ea841eb94d40f9"} Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.787828 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-7cjtc" podStartSLOduration=123.787813259 podStartE2EDuration="2m3.787813259s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:24.785843475 +0000 UTC m=+143.115516414" watchObservedRunningTime="2026-01-22 14:21:24.787813259 +0000 UTC m=+143.117486188" Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.788920 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zfs47" 
event={"ID":"1a92e7d6-836a-4227-9ee8-3db4353fdfa1","Type":"ContainerStarted","Data":"db8010648b961e969bcac7b8bd8c1ae9a10116f4b455e27df5db32c66cd2ce18"} Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.826455 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-f4955" podStartSLOduration=123.826438258 podStartE2EDuration="2m3.826438258s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:24.824371822 +0000 UTC m=+143.154044751" watchObservedRunningTime="2026-01-22 14:21:24.826438258 +0000 UTC m=+143.156111177" Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.828546 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-t6lsv" event={"ID":"90f993a5-e187-4e11-9295-fba46ad3f759","Type":"ContainerStarted","Data":"904768f5895429607e454704439f1e5770c62d8d0df1c9e15adf4472dd57107c"} Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.850524 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:24 crc kubenswrapper[4948]: E0122 14:21:24.852573 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:25.352557598 +0000 UTC m=+143.682230527 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.861849 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-zfs47" podStartSLOduration=123.861831091 podStartE2EDuration="2m3.861831091s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:24.861110261 +0000 UTC m=+143.190783190" watchObservedRunningTime="2026-01-22 14:21:24.861831091 +0000 UTC m=+143.191504020" Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.886085 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-bbmbn" event={"ID":"f2c0d1d9-76f0-45bb-8a15-88a0876d5b76","Type":"ContainerStarted","Data":"9cf6c38ac4e9d3cecd56c20c0369e7cbccd2fa6910f2b55141363e7d07484f02"} Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.888248 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-t6lsv" podStartSLOduration=7.888236777 podStartE2EDuration="7.888236777s" podCreationTimestamp="2026-01-22 14:21:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:24.885851273 +0000 UTC m=+143.215524202" watchObservedRunningTime="2026-01-22 14:21:24.888236777 +0000 UTC m=+143.217909696" Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.913781 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-cjtzs" event={"ID":"ca154693-ba02-4d7b-a4b6-45fee35a0280","Type":"ContainerStarted","Data":"9cac8e2577df4e99c5a092c18cfbb4ca2c4f4f9c41f8e1b5b25d426c8431dc27"} Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.924176 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-bbmbn" podStartSLOduration=123.924156284 podStartE2EDuration="2m3.924156284s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:24.919781895 +0000 UTC m=+143.249454824" watchObservedRunningTime="2026-01-22 14:21:24.924156284 +0000 UTC m=+143.253829213" Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.951407 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x5qwp" event={"ID":"6f9164f3-da46-447e-94fd-ffda5fb2f565","Type":"ContainerStarted","Data":"ac548c36f572a0211b064435b47eec213eafdfeb0722ac52fa3d5f420f660826"} Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.953170 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:24 crc kubenswrapper[4948]: E0122 14:21:24.969378 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:25.469342702 +0000 UTC m=+143.799015631 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.989584 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-x5qwp" podStartSLOduration=123.989544241 podStartE2EDuration="2m3.989544241s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:24.986347584 +0000 UTC m=+143.316020513" watchObservedRunningTime="2026-01-22 14:21:24.989544241 +0000 UTC m=+143.319217170" Jan 22 14:21:24 crc kubenswrapper[4948]: I0122 14:21:24.995486 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-52bdf" event={"ID":"3722b0ae-dab1-4301-b98e-de27b27ab754","Type":"ContainerStarted","Data":"8542f10d813415f35e1b287d255ea12c1f48b2d7f54c38e900f5a4f1d1e39712"} Jan 22 14:21:25 crc kubenswrapper[4948]: I0122 14:21:25.054985 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:25 crc kubenswrapper[4948]: E0122 14:21:25.055308 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:25.555261268 +0000 UTC m=+143.884934197 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:25 crc kubenswrapper[4948]: I0122 14:21:25.055454 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:25 crc kubenswrapper[4948]: E0122 14:21:25.055890 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:25.555882004 +0000 UTC m=+143.885554933 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:25 crc kubenswrapper[4948]: I0122 14:21:25.155836 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:25 crc kubenswrapper[4948]: E0122 14:21:25.156144 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:25.656126989 +0000 UTC m=+143.985799918 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:25 crc kubenswrapper[4948]: I0122 14:21:25.218557 4948 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-6j7zl container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.34:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 22 14:21:25 crc kubenswrapper[4948]: I0122 14:21:25.218626 4948 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6j7zl" podUID="33ae6fda-0f24-436c-b813-e01bb556110e" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.34:5443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 22 14:21:25 crc kubenswrapper[4948]: I0122 14:21:25.257308 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:25 crc kubenswrapper[4948]: E0122 14:21:25.258510 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:25.758481321 +0000 UTC m=+144.088154250 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:25 crc kubenswrapper[4948]: I0122 14:21:25.329181 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-01-22 14:16:24 +0000 UTC, rotation deadline is 2026-10-15 02:57:23.668919699 +0000 UTC Jan 22 14:21:25 crc kubenswrapper[4948]: I0122 14:21:25.329217 4948 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 6372h35m58.339705156s for next certificate rotation Jan 22 14:21:25 crc kubenswrapper[4948]: I0122 14:21:25.358569 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:25 crc kubenswrapper[4948]: E0122 14:21:25.358918 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:25.85889654 +0000 UTC m=+144.188569469 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:25 crc kubenswrapper[4948]: I0122 14:21:25.359055 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:25 crc kubenswrapper[4948]: E0122 14:21:25.359388 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:25.859381603 +0000 UTC m=+144.189054532 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:25 crc kubenswrapper[4948]: I0122 14:21:25.376567 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-xgx8j" Jan 22 14:21:25 crc kubenswrapper[4948]: I0122 14:21:25.382513 4948 patch_prober.go:28] interesting pod/router-default-5444994796-xgx8j container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 14:21:25 crc kubenswrapper[4948]: [-]has-synced failed: reason withheld Jan 22 14:21:25 crc kubenswrapper[4948]: [+]process-running ok Jan 22 14:21:25 crc kubenswrapper[4948]: healthz check failed Jan 22 14:21:25 crc kubenswrapper[4948]: I0122 14:21:25.382579 4948 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xgx8j" podUID="cfbebb9e-7111-4271-bff9-e663d96e586e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 14:21:25 crc kubenswrapper[4948]: I0122 14:21:25.459767 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:25 crc kubenswrapper[4948]: E0122 14:21:25.459974 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:25.959937026 +0000 UTC m=+144.289609965 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:25 crc kubenswrapper[4948]: I0122 14:21:25.460054 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:25 crc kubenswrapper[4948]: E0122 14:21:25.460396 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:25.960378068 +0000 UTC m=+144.290050997 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:25 crc kubenswrapper[4948]: I0122 14:21:25.561118 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:25 crc kubenswrapper[4948]: E0122 14:21:25.561377 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:26.061345802 +0000 UTC m=+144.391018731 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:25 crc kubenswrapper[4948]: I0122 14:21:25.561787 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:25 crc kubenswrapper[4948]: E0122 14:21:25.562190 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:26.062180465 +0000 UTC m=+144.391853474 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:25 crc kubenswrapper[4948]: I0122 14:21:25.662668 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:25 crc kubenswrapper[4948]: E0122 14:21:25.662807 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:26.162781529 +0000 UTC m=+144.492454458 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:25 crc kubenswrapper[4948]: I0122 14:21:25.662887 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:25 crc kubenswrapper[4948]: E0122 14:21:25.663236 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:26.163221161 +0000 UTC m=+144.492894090 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:25 crc kubenswrapper[4948]: I0122 14:21:25.763703 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:25 crc kubenswrapper[4948]: E0122 14:21:25.763902 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:26.263871947 +0000 UTC m=+144.593544886 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:25 crc kubenswrapper[4948]: I0122 14:21:25.764076 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:25 crc kubenswrapper[4948]: E0122 14:21:25.764514 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:26.264500883 +0000 UTC m=+144.594173812 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:25 crc kubenswrapper[4948]: I0122 14:21:25.870948 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:25 crc kubenswrapper[4948]: E0122 14:21:25.871942 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:26.371896372 +0000 UTC m=+144.701569301 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:25 crc kubenswrapper[4948]: I0122 14:21:25.972539 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:25 crc kubenswrapper[4948]: E0122 14:21:25.972934 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:26.472916598 +0000 UTC m=+144.802589527 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.026934 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-9svkn" event={"ID":"0a9e9dc5-0fe4-493d-9d50-c92c3e7a983c","Type":"ContainerStarted","Data":"b51ae8050f27cc517134a3926252a5e0c8c08eed3ecc3921f7187a9348c8f62c"} Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.026991 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-9svkn" event={"ID":"0a9e9dc5-0fe4-493d-9d50-c92c3e7a983c","Type":"ContainerStarted","Data":"7876ba21cbe856bba63a982cdf8086a3a2a275246ff9ba7a85b93cfcc2df6cfe"} Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.028113 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-9svkn" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.036401 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-t6lsv" event={"ID":"90f993a5-e187-4e11-9295-fba46ad3f759","Type":"ContainerStarted","Data":"be3354ab19304df296e703e156d7a87db325ba163ba39814e9a3db6c9e8f5c67"} Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.038236 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-cjtzs" event={"ID":"ca154693-ba02-4d7b-a4b6-45fee35a0280","Type":"ContainerStarted","Data":"d5adc7af45b0d81e9b51b49ad9d56dc73e5e6aea1fc2188cd160eca1f9096a25"} Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.038260 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-cjtzs" event={"ID":"ca154693-ba02-4d7b-a4b6-45fee35a0280","Type":"ContainerStarted","Data":"b1ab860bb169d8a892c07f59bc22b97dc9b0fa7f0218bbc7a9c329cd192227b8"} Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.041418 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-6whzf" event={"ID":"a316cc87-51ce-49fd-b81c-fab7d253dea6","Type":"ContainerStarted","Data":"54797305eb40289885dc2ed26318fce753ecfc8509bf0daddc80d3319829c16d"} Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.049406 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lt5mv" event={"ID":"ad723228-be0e-414d-a738-9ae3a916bef7","Type":"ContainerStarted","Data":"4a6a171216eb0f927601d490910dbe42576c54c8b061775a604503428e0b915d"} Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.049445 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lt5mv" event={"ID":"ad723228-be0e-414d-a738-9ae3a916bef7","Type":"ContainerStarted","Data":"0331d242c03537220a86f39b1a1be91bc1b1f6d0bbff7ea3dece82a06e76d26b"} Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.054842 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-9svkn" podStartSLOduration=9.054822694 podStartE2EDuration="9.054822694s" podCreationTimestamp="2026-01-22 14:21:17 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:26.053145568 +0000 UTC m=+144.382818497" watchObservedRunningTime="2026-01-22 14:21:26.054822694 +0000 UTC m=+144.384495623" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.064658 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h9sz7" event={"ID":"68335fcc-2687-4bf0-8a43-f4e3d3c4702e","Type":"ContainerStarted","Data":"633441e7d31fce957d001e440c0624ac3af17b98c510f783df52908bd8ce359c"} Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.064805 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h9sz7" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.071780 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-8q98t" event={"ID":"e31f1c5f-27f0-4742-a04b-91544011bdad","Type":"ContainerStarted","Data":"0d9293d6beeffe2148a6bf6535cbd0f07788d40a1840cda11db2039a01b0583c"} Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.073967 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:26 crc kubenswrapper[4948]: E0122 14:21:26.074120 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:26.574073127 +0000 UTC m=+144.903746056 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.075211 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:26 crc kubenswrapper[4948]: E0122 14:21:26.076282 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:26.576262857 +0000 UTC m=+144.905935786 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.083688 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-jwzjg" event={"ID":"8c9115ab-73ac-4742-96e7-041345c9c222","Type":"ContainerStarted","Data":"a31c5937866ea4827d7b81290b254bc75fae7b25e0825820d3dabeeff9048ea3"} Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.093678 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-6whzf" podStartSLOduration=125.093661979 podStartE2EDuration="2m5.093661979s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:26.088979602 +0000 UTC m=+144.418652531" watchObservedRunningTime="2026-01-22 14:21:26.093661979 +0000 UTC m=+144.423334898" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.098343 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-bbmbn" event={"ID":"f2c0d1d9-76f0-45bb-8a15-88a0876d5b76","Type":"ContainerStarted","Data":"d609f103371486fe609951bb5ac164fe2032a018424c1d2d808eb6bfc80849cf"} Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.100879 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-7kdkq" event={"ID":"5defb8aa-bd1f-43d9-a86d-0026afa4f8d1","Type":"ContainerStarted","Data":"618d6005163a1117498a7264c9d9b9e151226b9472d2c80ddf4437db821d3439"} Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.102294 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm" event={"ID":"fec867e7-c935-489e-b5e9-70d0cf71413b","Type":"ContainerStarted","Data":"eb43f0243d3cfb1b9f60d4e6368f173cdda599c17c5e0080d4876ef6d92b8d20"} Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.102466 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.106067 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" event={"ID":"1cb99e0f-ec79-4e50-9284-f161cfe70a93","Type":"ContainerStarted","Data":"06fc71b5d22074ba98c3595d875ca430462c9eaaf67378073b959c57863c970d"} Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.110986 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.113102 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" event={"ID":"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0","Type":"ContainerStarted","Data":"0d3f20736faaf23b350757a0e3998312081ffbb66a9ff9d30068a0c0cd09a41a"} Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.113132 4948 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" event={"ID":"b8b97979-5e3e-4a54-8a0c-a6de0b92fcc0","Type":"ContainerStarted","Data":"b60cf681463dae0a63b5bae0120e9ee15fdfb6e7967c8cc73b511fe870d30abe"} Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.117058 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ln6zk" event={"ID":"6187e924-4baa-4a90-a669-891278f2e23d","Type":"ContainerStarted","Data":"aa4ea832c7532c954214b9c7d6b76e5657c0cc35dc884a0e8d36776186d651b0"} Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.117111 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ln6zk" event={"ID":"6187e924-4baa-4a90-a669-891278f2e23d","Type":"ContainerStarted","Data":"2c8d8bb15366dcb2d3a2dc9d8f283d21073de009ec3416efadee4111d043afcc"} Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.145667 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-lt5mv" podStartSLOduration=125.145651982 podStartE2EDuration="2m5.145651982s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:26.14261728 +0000 UTC m=+144.472290209" watchObservedRunningTime="2026-01-22 14:21:26.145651982 +0000 UTC m=+144.475324911" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.147610 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkbsc" event={"ID":"4c1d05e6-fc11-4fb5-bf46-c346ded55ea8","Type":"ContainerStarted","Data":"f2a636aa0125511b08f9cb4c71e80a78bd5df1888ece5c49635c22f8893b319b"} Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.174034 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l9pc7" event={"ID":"ce5de36e-7ed5-42bb-bf9d-28bf32c76edf","Type":"ContainerStarted","Data":"1dafe914dd8fc2b55fba2b73e014a950060420adb63531d1552c1ae6bf747755"} Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.181127 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:26 crc kubenswrapper[4948]: E0122 14:21:26.182748 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:26.68273057 +0000 UTC m=+145.012403499 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.200938 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-cjtzs" podStartSLOduration=125.200914544 podStartE2EDuration="2m5.200914544s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:26.199548857 +0000 UTC m=+144.529221786" watchObservedRunningTime="2026-01-22 14:21:26.200914544 +0000 UTC m=+144.530587473" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.217059 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-s4jms" event={"ID":"a35e6789-77cc-4dd7-acc0-e4ca4ee5cdad","Type":"ContainerStarted","Data":"31a30fe91bba4e9b1b42d91d5d15fe122e36739ddac6bc6adf6b4078deb3e753"} Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.217105 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-s4jms" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.225646 4948 patch_prober.go:28] interesting pod/downloads-7954f5f757-s4jms container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.225702 4948 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-s4jms" podUID="a35e6789-77cc-4dd7-acc0-e4ca4ee5cdad" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.260166 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fkltl" event={"ID":"66ee3af7-a820-42da-8190-e84882cee1d4","Type":"ContainerStarted","Data":"0e98442e81cb947e82dcbcd6fe99de07d8025c55d1b4f9bcaacaabe652a9b6df"} Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.278200 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-6j7zl" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.280295 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-7cjtc" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.285032 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:26 crc kubenswrapper[4948]: E0122 14:21:26.292066 4948 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:26.792049381 +0000 UTC m=+145.121722310 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.300231 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-974mb" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.303542 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-5jkws" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.312616 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-jwzjg" podStartSLOduration=125.3125939 podStartE2EDuration="2m5.3125939s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:26.311773437 +0000 UTC m=+144.641446366" watchObservedRunningTime="2026-01-22 14:21:26.3125939 +0000 UTC m=+144.642266829" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.314211 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" podStartSLOduration=124.314205014 podStartE2EDuration="2m4.314205014s" podCreationTimestamp="2026-01-22 14:19:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:26.245600639 +0000 UTC m=+144.575273578" watchObservedRunningTime="2026-01-22 14:21:26.314205014 +0000 UTC m=+144.643877943" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.320619 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.375185 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" podStartSLOduration=125.37515214 podStartE2EDuration="2m5.37515214s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:26.37370305 +0000 UTC m=+144.703375979" watchObservedRunningTime="2026-01-22 14:21:26.37515214 +0000 UTC m=+144.704825069" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.387800 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:26 crc 
kubenswrapper[4948]: E0122 14:21:26.388138 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:26.888105672 +0000 UTC m=+145.217778601 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.395878 4948 patch_prober.go:28] interesting pod/router-default-5444994796-xgx8j container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 14:21:26 crc kubenswrapper[4948]: [-]has-synced failed: reason withheld Jan 22 14:21:26 crc kubenswrapper[4948]: [+]process-running ok Jan 22 14:21:26 crc kubenswrapper[4948]: healthz check failed Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.401576 4948 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xgx8j" podUID="cfbebb9e-7111-4271-bff9-e663d96e586e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.459070 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-jkbsc" podStartSLOduration=125.45905143 podStartE2EDuration="2m5.45905143s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:26.420006619 +0000 UTC m=+144.749679548" watchObservedRunningTime="2026-01-22 14:21:26.45905143 +0000 UTC m=+144.788724359" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.482803 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm" podStartSLOduration=124.482776415 podStartE2EDuration="2m4.482776415s" podCreationTimestamp="2026-01-22 14:19:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:26.455536935 +0000 UTC m=+144.785209864" watchObservedRunningTime="2026-01-22 14:21:26.482776415 +0000 UTC m=+144.812449344" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.489931 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:26 crc kubenswrapper[4948]: E0122 14:21:26.490299 4948 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:26.99028557 +0000 UTC m=+145.319958499 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.569571 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-l9pc7" podStartSLOduration=125.569539633 podStartE2EDuration="2m5.569539633s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:26.568437063 +0000 UTC m=+144.898109992" watchObservedRunningTime="2026-01-22 14:21:26.569539633 +0000 UTC m=+144.899212562" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.571410 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ln6zk" podStartSLOduration=125.571402754 podStartE2EDuration="2m5.571402754s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:26.509820861 +0000 UTC m=+144.839493790" watchObservedRunningTime="2026-01-22 14:21:26.571402754 +0000 UTC m=+144.901075683" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.590869 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:26 crc kubenswrapper[4948]: E0122 14:21:26.591577 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:27.091550812 +0000 UTC m=+145.421223741 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.631279 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h9sz7" podStartSLOduration=125.631261371 podStartE2EDuration="2m5.631261371s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:26.600001221 +0000 UTC m=+144.929674150" watchObservedRunningTime="2026-01-22 14:21:26.631261371 +0000 UTC m=+144.960934300" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.634337 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-7kdkq" podStartSLOduration=125.634329994 podStartE2EDuration="2m5.634329994s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:26.632095794 +0000 UTC m=+144.961768723" watchObservedRunningTime="2026-01-22 14:21:26.634329994 +0000 UTC m=+144.964002923" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.634997 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-h27pl"] Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.635939 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-h27pl" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.643522 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.659439 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-h27pl"] Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.692891 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.692986 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cc0468b-0cc6-43b6-b358-72c5cb9d6050-catalog-content\") pod \"certified-operators-h27pl\" (UID: \"8cc0468b-0cc6-43b6-b358-72c5cb9d6050\") " pod="openshift-marketplace/certified-operators-h27pl" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.693036 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cc0468b-0cc6-43b6-b358-72c5cb9d6050-utilities\") pod \"certified-operators-h27pl\" (UID: \"8cc0468b-0cc6-43b6-b358-72c5cb9d6050\") " pod="openshift-marketplace/certified-operators-h27pl" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.693295 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mpgl\" (UniqueName: \"kubernetes.io/projected/8cc0468b-0cc6-43b6-b358-72c5cb9d6050-kube-api-access-9mpgl\") pod \"certified-operators-h27pl\" (UID: \"8cc0468b-0cc6-43b6-b358-72c5cb9d6050\") " pod="openshift-marketplace/certified-operators-h27pl" Jan 22 14:21:26 crc kubenswrapper[4948]: E0122 14:21:26.693443 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:27.193427701 +0000 UTC m=+145.523100630 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.762597 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-fkltl" podStartSLOduration=125.76257691 podStartE2EDuration="2m5.76257691s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:26.687993683 +0000 UTC m=+145.017666612" watchObservedRunningTime="2026-01-22 14:21:26.76257691 +0000 UTC m=+145.092249849" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.794374 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.794580 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cc0468b-0cc6-43b6-b358-72c5cb9d6050-catalog-content\") pod \"certified-operators-h27pl\" (UID: \"8cc0468b-0cc6-43b6-b358-72c5cb9d6050\") " pod="openshift-marketplace/certified-operators-h27pl" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.794605 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cc0468b-0cc6-43b6-b358-72c5cb9d6050-utilities\") pod \"certified-operators-h27pl\" (UID: \"8cc0468b-0cc6-43b6-b358-72c5cb9d6050\") " pod="openshift-marketplace/certified-operators-h27pl" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.794644 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mpgl\" (UniqueName: \"kubernetes.io/projected/8cc0468b-0cc6-43b6-b358-72c5cb9d6050-kube-api-access-9mpgl\") pod \"certified-operators-h27pl\" (UID: \"8cc0468b-0cc6-43b6-b358-72c5cb9d6050\") " pod="openshift-marketplace/certified-operators-h27pl" Jan 22 14:21:26 crc kubenswrapper[4948]: E0122 14:21:26.794896 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:27.294881958 +0000 UTC m=+145.624554887 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.795290 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cc0468b-0cc6-43b6-b358-72c5cb9d6050-catalog-content\") pod \"certified-operators-h27pl\" (UID: \"8cc0468b-0cc6-43b6-b358-72c5cb9d6050\") " pod="openshift-marketplace/certified-operators-h27pl" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.795503 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cc0468b-0cc6-43b6-b358-72c5cb9d6050-utilities\") pod \"certified-operators-h27pl\" (UID: \"8cc0468b-0cc6-43b6-b358-72c5cb9d6050\") " pod="openshift-marketplace/certified-operators-h27pl" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.849387 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mpgl\" (UniqueName: \"kubernetes.io/projected/8cc0468b-0cc6-43b6-b358-72c5cb9d6050-kube-api-access-9mpgl\") pod \"certified-operators-h27pl\" (UID: \"8cc0468b-0cc6-43b6-b358-72c5cb9d6050\") " pod="openshift-marketplace/certified-operators-h27pl" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.881840 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-mkngx"] Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.882737 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mkngx" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.897042 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.897102 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fvnj2\" (UniqueName: \"kubernetes.io/projected/57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce-kube-api-access-fvnj2\") pod \"community-operators-mkngx\" (UID: \"57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce\") " pod="openshift-marketplace/community-operators-mkngx" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.897140 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce-catalog-content\") pod \"community-operators-mkngx\" (UID: \"57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce\") " pod="openshift-marketplace/community-operators-mkngx" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.897183 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce-utilities\") pod \"community-operators-mkngx\" (UID: \"57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce\") " pod="openshift-marketplace/community-operators-mkngx" Jan 22 14:21:26 crc kubenswrapper[4948]: E0122 14:21:26.897480 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:27.397467886 +0000 UTC m=+145.727140815 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.900267 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mkngx"] Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.905463 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 22 14:21:26 crc kubenswrapper[4948]: I0122 14:21:26.979994 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-h27pl" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.002704 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:27 crc kubenswrapper[4948]: E0122 14:21:27.002935 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:27.502888542 +0000 UTC m=+145.832561471 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.003320 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.003370 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fvnj2\" (UniqueName: \"kubernetes.io/projected/57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce-kube-api-access-fvnj2\") pod \"community-operators-mkngx\" (UID: \"57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce\") " pod="openshift-marketplace/community-operators-mkngx" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.003402 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce-catalog-content\") pod \"community-operators-mkngx\" (UID: \"57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce\") " pod="openshift-marketplace/community-operators-mkngx" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.003459 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce-utilities\") pod \"community-operators-mkngx\" (UID: \"57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce\") " pod="openshift-marketplace/community-operators-mkngx" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.003923 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce-utilities\") pod \"community-operators-mkngx\" (UID: \"57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce\") " pod="openshift-marketplace/community-operators-mkngx" Jan 22 14:21:27 crc kubenswrapper[4948]: E0122 14:21:27.004223 4948 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:27.504209168 +0000 UTC m=+145.833882097 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.004842 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce-catalog-content\") pod \"community-operators-mkngx\" (UID: \"57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce\") " pod="openshift-marketplace/community-operators-mkngx" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.049752 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-p5mjq"] Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.050646 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-p5mjq" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.062046 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fvnj2\" (UniqueName: \"kubernetes.io/projected/57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce-kube-api-access-fvnj2\") pod \"community-operators-mkngx\" (UID: \"57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce\") " pod="openshift-marketplace/community-operators-mkngx" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.104433 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.104615 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pgq9g\" (UniqueName: \"kubernetes.io/projected/a15930ec-26ea-4d6f-b1fc-2aa252298992-kube-api-access-pgq9g\") pod \"certified-operators-p5mjq\" (UID: \"a15930ec-26ea-4d6f-b1fc-2aa252298992\") " pod="openshift-marketplace/certified-operators-p5mjq" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.104652 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a15930ec-26ea-4d6f-b1fc-2aa252298992-catalog-content\") pod \"certified-operators-p5mjq\" (UID: \"a15930ec-26ea-4d6f-b1fc-2aa252298992\") " pod="openshift-marketplace/certified-operators-p5mjq" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.104676 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a15930ec-26ea-4d6f-b1fc-2aa252298992-utilities\") pod \"certified-operators-p5mjq\" (UID: \"a15930ec-26ea-4d6f-b1fc-2aa252298992\") " pod="openshift-marketplace/certified-operators-p5mjq" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 
14:21:27.104765 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-p5mjq"] Jan 22 14:21:27 crc kubenswrapper[4948]: E0122 14:21:27.104839 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:27.604823571 +0000 UTC m=+145.934496500 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.172656 4948 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.207859 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pgq9g\" (UniqueName: \"kubernetes.io/projected/a15930ec-26ea-4d6f-b1fc-2aa252298992-kube-api-access-pgq9g\") pod \"certified-operators-p5mjq\" (UID: \"a15930ec-26ea-4d6f-b1fc-2aa252298992\") " pod="openshift-marketplace/certified-operators-p5mjq" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.208532 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a15930ec-26ea-4d6f-b1fc-2aa252298992-catalog-content\") pod \"certified-operators-p5mjq\" (UID: \"a15930ec-26ea-4d6f-b1fc-2aa252298992\") " pod="openshift-marketplace/certified-operators-p5mjq" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.208617 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a15930ec-26ea-4d6f-b1fc-2aa252298992-utilities\") pod \"certified-operators-p5mjq\" (UID: \"a15930ec-26ea-4d6f-b1fc-2aa252298992\") " pod="openshift-marketplace/certified-operators-p5mjq" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.208711 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:27 crc kubenswrapper[4948]: E0122 14:21:27.209079 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:27.709066665 +0000 UTC m=+146.038739594 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.209935 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a15930ec-26ea-4d6f-b1fc-2aa252298992-catalog-content\") pod \"certified-operators-p5mjq\" (UID: \"a15930ec-26ea-4d6f-b1fc-2aa252298992\") " pod="openshift-marketplace/certified-operators-p5mjq" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.210242 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a15930ec-26ea-4d6f-b1fc-2aa252298992-utilities\") pod \"certified-operators-p5mjq\" (UID: \"a15930ec-26ea-4d6f-b1fc-2aa252298992\") " pod="openshift-marketplace/certified-operators-p5mjq" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.249436 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-s4jms" podStartSLOduration=126.249415272 podStartE2EDuration="2m6.249415272s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:27.121352331 +0000 UTC m=+145.451025260" watchObservedRunningTime="2026-01-22 14:21:27.249415272 +0000 UTC m=+145.579088201" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.255219 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-mkngx" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.274973 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pgq9g\" (UniqueName: \"kubernetes.io/projected/a15930ec-26ea-4d6f-b1fc-2aa252298992-kube-api-access-pgq9g\") pod \"certified-operators-p5mjq\" (UID: \"a15930ec-26ea-4d6f-b1fc-2aa252298992\") " pod="openshift-marketplace/certified-operators-p5mjq" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.296498 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-vqmnb"] Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.304693 4948 patch_prober.go:28] interesting pod/downloads-7954f5f757-s4jms container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.304810 4948 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-s4jms" podUID="a35e6789-77cc-4dd7-acc0-e4ca4ee5cdad" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.305487 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-8q98t" event={"ID":"e31f1c5f-27f0-4742-a04b-91544011bdad","Type":"ContainerStarted","Data":"5af0c22fe6a8a4cd94ec6275f793e13e034f2603df98893ee912a3f7db0cba84"} Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.305563 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vqmnb" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.336800 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:27 crc kubenswrapper[4948]: E0122 14:21:27.337725 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:27.837707331 +0000 UTC m=+146.167380250 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.384219 4948 patch_prober.go:28] interesting pod/router-default-5444994796-xgx8j container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 14:21:27 crc kubenswrapper[4948]: [-]has-synced failed: reason withheld Jan 22 14:21:27 crc kubenswrapper[4948]: [+]process-running ok Jan 22 14:21:27 crc kubenswrapper[4948]: healthz check failed Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.384289 4948 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xgx8j" podUID="cfbebb9e-7111-4271-bff9-e663d96e586e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.404628 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vqmnb"] Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.419948 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-p5mjq" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.439090 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pklq5\" (UniqueName: \"kubernetes.io/projected/57035aac-3298-49ff-990c-06d1c53f1e42-kube-api-access-pklq5\") pod \"community-operators-vqmnb\" (UID: \"57035aac-3298-49ff-990c-06d1c53f1e42\") " pod="openshift-marketplace/community-operators-vqmnb" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.439236 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57035aac-3298-49ff-990c-06d1c53f1e42-utilities\") pod \"community-operators-vqmnb\" (UID: \"57035aac-3298-49ff-990c-06d1c53f1e42\") " pod="openshift-marketplace/community-operators-vqmnb" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.439495 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.439546 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57035aac-3298-49ff-990c-06d1c53f1e42-catalog-content\") pod \"community-operators-vqmnb\" (UID: \"57035aac-3298-49ff-990c-06d1c53f1e42\") " pod="openshift-marketplace/community-operators-vqmnb" Jan 22 14:21:27 crc kubenswrapper[4948]: E0122 14:21:27.447780 4948 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:27.947757992 +0000 UTC m=+146.277431121 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.544967 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.545199 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pklq5\" (UniqueName: \"kubernetes.io/projected/57035aac-3298-49ff-990c-06d1c53f1e42-kube-api-access-pklq5\") pod \"community-operators-vqmnb\" (UID: \"57035aac-3298-49ff-990c-06d1c53f1e42\") " pod="openshift-marketplace/community-operators-vqmnb" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.545224 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57035aac-3298-49ff-990c-06d1c53f1e42-utilities\") pod \"community-operators-vqmnb\" (UID: \"57035aac-3298-49ff-990c-06d1c53f1e42\") " pod="openshift-marketplace/community-operators-vqmnb" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.545268 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57035aac-3298-49ff-990c-06d1c53f1e42-catalog-content\") pod \"community-operators-vqmnb\" (UID: \"57035aac-3298-49ff-990c-06d1c53f1e42\") " pod="openshift-marketplace/community-operators-vqmnb" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.545643 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57035aac-3298-49ff-990c-06d1c53f1e42-catalog-content\") pod \"community-operators-vqmnb\" (UID: \"57035aac-3298-49ff-990c-06d1c53f1e42\") " pod="openshift-marketplace/community-operators-vqmnb" Jan 22 14:21:27 crc kubenswrapper[4948]: E0122 14:21:27.545711 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:28.045694953 +0000 UTC m=+146.375367882 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.546632 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57035aac-3298-49ff-990c-06d1c53f1e42-utilities\") pod \"community-operators-vqmnb\" (UID: \"57035aac-3298-49ff-990c-06d1c53f1e42\") " pod="openshift-marketplace/community-operators-vqmnb" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.614696 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pklq5\" (UniqueName: \"kubernetes.io/projected/57035aac-3298-49ff-990c-06d1c53f1e42-kube-api-access-pklq5\") pod \"community-operators-vqmnb\" (UID: \"57035aac-3298-49ff-990c-06d1c53f1e42\") " pod="openshift-marketplace/community-operators-vqmnb" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.650523 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:27 crc kubenswrapper[4948]: E0122 14:21:27.651550 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:28.15152862 +0000 UTC m=+146.481201549 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.681432 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vqmnb" Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.763132 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:27 crc kubenswrapper[4948]: E0122 14:21:27.763614 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:28.263597556 +0000 UTC m=+146.593270495 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.868149 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:27 crc kubenswrapper[4948]: E0122 14:21:27.868641 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:28.368624761 +0000 UTC m=+146.698297690 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.952438 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-mkngx"] Jan 22 14:21:27 crc kubenswrapper[4948]: I0122 14:21:27.976102 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:27 crc kubenswrapper[4948]: E0122 14:21:27.976598 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:28.476573365 +0000 UTC m=+146.806246294 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.078950 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:28 crc kubenswrapper[4948]: E0122 14:21:28.080340 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-01-22 14:21:28.580319774 +0000 UTC m=+146.909992703 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-5mc8q" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.173161 4948 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-01-22T14:21:27.172699006Z","Handler":null,"Name":""} Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.179770 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:28 crc kubenswrapper[4948]: E0122 14:21:28.180298 4948 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-01-22 14:21:28.680280602 +0000 UTC m=+147.009953521 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.184099 4948 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.184153 4948 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.217550 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-h27pl"] Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.219649 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-vqmnb"] Jan 22 14:21:28 crc kubenswrapper[4948]: W0122 14:21:28.221886 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod57035aac_3298_49ff_990c_06d1c53f1e42.slice/crio-4b30fcd584f0c87b31571e07cc5aa2cd6a5d5e71d77046554a8bdcd68341730d WatchSource:0}: Error finding container 4b30fcd584f0c87b31571e07cc5aa2cd6a5d5e71d77046554a8bdcd68341730d: Status 404 returned error can't find the container with id 4b30fcd584f0c87b31571e07cc5aa2cd6a5d5e71d77046554a8bdcd68341730d Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.281933 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.285091 4948 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
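[Note] The registration sequence above (plugin_watcher.go adding kubevirt.io.hostpath-provisioner-reg.sock to the desired state cache, then csi_plugin.go validating and registering the driver at /var/lib/kubelet/plugins/csi-hostpath/csi.sock, versions 1.0.0) is the standard kubelet plugin-registration handshake: a registrar sidecar serves a small gRPC identity service on the *-reg.sock path, the kubelet calls GetInfo to learn the driver name, CSI endpoint and supported versions, and only after that do the earlier "driver name ... not found in the list of registered CSI drivers" mount/unmount retries start succeeding. Below is a minimal, illustrative sketch of such an identity service, assuming the k8s.io/kubelet pluginregistration v1 API; the driver name, socket paths and version are taken from the log, everything else is hypothetical and is not the hostpath-provisioner's actual source.

    // Sketch only: a plugin-registration identity service of the kind a CSI
    // registrar sidecar (e.g. node-driver-registrar) exposes on the
    // *-reg.sock path. Assumes the pluginregistration v1 API; not the real
    // kubevirt.io.hostpath-provisioner implementation.
    package main

    import (
        "context"
        "net"

        "google.golang.org/grpc"
        registerapi "k8s.io/kubelet/pkg/apis/pluginregistration/v1"
    )

    type registrationServer struct{}

    // GetInfo is what the kubelet calls after its plugin watcher notices the
    // socket; the values mirror the name/endpoint/version seen in the log.
    func (registrationServer) GetInfo(ctx context.Context, req *registerapi.InfoRequest) (*registerapi.PluginInfo, error) {
        return &registerapi.PluginInfo{
            Type:              registerapi.CSIPlugin,
            Name:              "kubevirt.io.hostpath-provisioner",
            Endpoint:          "/var/lib/kubelet/plugins/csi-hostpath/csi.sock",
            SupportedVersions: []string{"1.0.0"},
        }, nil
    }

    // NotifyRegistrationStatus reports the outcome of the registration
    // attempt back to the plugin ("Register new plugin ..." on success).
    func (registrationServer) NotifyRegistrationStatus(ctx context.Context, status *registerapi.RegistrationStatus) (*registerapi.RegistrationStatusResponse, error) {
        return &registerapi.RegistrationStatusResponse{}, nil
    }

    func main() {
        // Socket path under plugins_registry, as watched by the kubelet.
        lis, err := net.Listen("unix", "/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock")
        if err != nil {
            panic(err)
        }
        srv := grpc.NewServer()
        registerapi.RegisterRegistrationServer(srv, registrationServer{})
        _ = srv.Serve(lis)
    }

Once this handshake completes, the pending MountVolume/UnmountVolume operations for pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 are retried and succeed, as the following entries show.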
Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.285161 4948 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.307305 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-p5mjq"] Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.314614 4948 generic.go:334] "Generic (PLEG): container finished" podID="57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce" containerID="f9f14943039348e6975a936f5349314e432000513cb7c3c3f3605e0742abef20" exitCode=0 Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.315146 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mkngx" event={"ID":"57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce","Type":"ContainerDied","Data":"f9f14943039348e6975a936f5349314e432000513cb7c3c3f3605e0742abef20"} Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.315198 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mkngx" event={"ID":"57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce","Type":"ContainerStarted","Data":"e464f339302b3f106264808b5b804b210b9fcb508d5f99800151d42be8a1693e"} Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.316786 4948 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.321115 4948 generic.go:334] "Generic (PLEG): container finished" podID="3722b0ae-dab1-4301-b98e-de27b27ab754" containerID="8542f10d813415f35e1b287d255ea12c1f48b2d7f54c38e900f5a4f1d1e39712" exitCode=0 Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.321206 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-52bdf" event={"ID":"3722b0ae-dab1-4301-b98e-de27b27ab754","Type":"ContainerDied","Data":"8542f10d813415f35e1b287d255ea12c1f48b2d7f54c38e900f5a4f1d1e39712"} Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.323913 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-8q98t" event={"ID":"e31f1c5f-27f0-4742-a04b-91544011bdad","Type":"ContainerStarted","Data":"c5358c660345007145b2a09ee3f7c776b56084617b3ca934d5e3db5f18b8ea5b"} Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.323976 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-8q98t" event={"ID":"e31f1c5f-27f0-4742-a04b-91544011bdad","Type":"ContainerStarted","Data":"8b5ea96cd77ba1de6e6f778f868abf16bd0b150994c83a0c50d646a0e24381b5"} Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.327024 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vqmnb" event={"ID":"57035aac-3298-49ff-990c-06d1c53f1e42","Type":"ContainerStarted","Data":"4b30fcd584f0c87b31571e07cc5aa2cd6a5d5e71d77046554a8bdcd68341730d"} Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.329264 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/certified-operators-h27pl" event={"ID":"8cc0468b-0cc6-43b6-b358-72c5cb9d6050","Type":"ContainerStarted","Data":"0cc0c199741569b31c3b36b87e6d4d180fcd7b52bfbe465b2e00265bf31c346a"} Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.330609 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-5mc8q\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.381347 4948 patch_prober.go:28] interesting pod/router-default-5444994796-xgx8j container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 14:21:28 crc kubenswrapper[4948]: [-]has-synced failed: reason withheld Jan 22 14:21:28 crc kubenswrapper[4948]: [+]process-running ok Jan 22 14:21:28 crc kubenswrapper[4948]: healthz check failed Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.381417 4948 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xgx8j" podUID="cfbebb9e-7111-4271-bff9-e663d96e586e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.383775 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.398709 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.425919 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.629008 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-8q98t" podStartSLOduration=11.628985576 podStartE2EDuration="11.628985576s" podCreationTimestamp="2026-01-22 14:21:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:28.391705128 +0000 UTC m=+146.721378067" watchObservedRunningTime="2026-01-22 14:21:28.628985576 +0000 UTC m=+146.958658505" Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.635172 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-5mc8q"] Jan 22 14:21:28 crc kubenswrapper[4948]: W0122 14:21:28.697306 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod05ff4ada_a73e_4df8_8ceb_fa133b52fa6e.slice/crio-4eea9a01aabdb32df16923f3bf1383567a5c5a05c97df009b46b2e609992a6e6 WatchSource:0}: Error finding container 4eea9a01aabdb32df16923f3bf1383567a5c5a05c97df009b46b2e609992a6e6: Status 404 returned error can't find the container with id 4eea9a01aabdb32df16923f3bf1383567a5c5a05c97df009b46b2e609992a6e6 Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.819155 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wstpq"] Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.820246 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wstpq" Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.822515 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.831137 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wstpq"] Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.892348 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a4d2a63-38c9-4cd6-b02a-06c27f128cd0-utilities\") pod \"redhat-marketplace-wstpq\" (UID: \"5a4d2a63-38c9-4cd6-b02a-06c27f128cd0\") " pod="openshift-marketplace/redhat-marketplace-wstpq" Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.892791 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-db5bw\" (UniqueName: \"kubernetes.io/projected/5a4d2a63-38c9-4cd6-b02a-06c27f128cd0-kube-api-access-db5bw\") pod \"redhat-marketplace-wstpq\" (UID: \"5a4d2a63-38c9-4cd6-b02a-06c27f128cd0\") " pod="openshift-marketplace/redhat-marketplace-wstpq" Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.892937 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a4d2a63-38c9-4cd6-b02a-06c27f128cd0-catalog-content\") pod \"redhat-marketplace-wstpq\" (UID: \"5a4d2a63-38c9-4cd6-b02a-06c27f128cd0\") " pod="openshift-marketplace/redhat-marketplace-wstpq" Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.994212 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-db5bw\" (UniqueName: 
\"kubernetes.io/projected/5a4d2a63-38c9-4cd6-b02a-06c27f128cd0-kube-api-access-db5bw\") pod \"redhat-marketplace-wstpq\" (UID: \"5a4d2a63-38c9-4cd6-b02a-06c27f128cd0\") " pod="openshift-marketplace/redhat-marketplace-wstpq" Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.994351 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a4d2a63-38c9-4cd6-b02a-06c27f128cd0-catalog-content\") pod \"redhat-marketplace-wstpq\" (UID: \"5a4d2a63-38c9-4cd6-b02a-06c27f128cd0\") " pod="openshift-marketplace/redhat-marketplace-wstpq" Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.994385 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a4d2a63-38c9-4cd6-b02a-06c27f128cd0-utilities\") pod \"redhat-marketplace-wstpq\" (UID: \"5a4d2a63-38c9-4cd6-b02a-06c27f128cd0\") " pod="openshift-marketplace/redhat-marketplace-wstpq" Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.995004 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a4d2a63-38c9-4cd6-b02a-06c27f128cd0-utilities\") pod \"redhat-marketplace-wstpq\" (UID: \"5a4d2a63-38c9-4cd6-b02a-06c27f128cd0\") " pod="openshift-marketplace/redhat-marketplace-wstpq" Jan 22 14:21:28 crc kubenswrapper[4948]: I0122 14:21:28.995558 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a4d2a63-38c9-4cd6-b02a-06c27f128cd0-catalog-content\") pod \"redhat-marketplace-wstpq\" (UID: \"5a4d2a63-38c9-4cd6-b02a-06c27f128cd0\") " pod="openshift-marketplace/redhat-marketplace-wstpq" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.016234 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-db5bw\" (UniqueName: \"kubernetes.io/projected/5a4d2a63-38c9-4cd6-b02a-06c27f128cd0-kube-api-access-db5bw\") pod \"redhat-marketplace-wstpq\" (UID: \"5a4d2a63-38c9-4cd6-b02a-06c27f128cd0\") " pod="openshift-marketplace/redhat-marketplace-wstpq" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.168781 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wstpq" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.227830 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kppfw"] Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.229111 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kppfw" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.241059 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kppfw"] Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.300722 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39643a3e-7da1-4e07-a5f0-1c53df079831-utilities\") pod \"redhat-marketplace-kppfw\" (UID: \"39643a3e-7da1-4e07-a5f0-1c53df079831\") " pod="openshift-marketplace/redhat-marketplace-kppfw" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.300882 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xg5mq\" (UniqueName: \"kubernetes.io/projected/39643a3e-7da1-4e07-a5f0-1c53df079831-kube-api-access-xg5mq\") pod \"redhat-marketplace-kppfw\" (UID: \"39643a3e-7da1-4e07-a5f0-1c53df079831\") " pod="openshift-marketplace/redhat-marketplace-kppfw" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.300912 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39643a3e-7da1-4e07-a5f0-1c53df079831-catalog-content\") pod \"redhat-marketplace-kppfw\" (UID: \"39643a3e-7da1-4e07-a5f0-1c53df079831\") " pod="openshift-marketplace/redhat-marketplace-kppfw" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.328989 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.329060 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.348380 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.385417 4948 generic.go:334] "Generic (PLEG): container finished" podID="57035aac-3298-49ff-990c-06d1c53f1e42" containerID="7bf21c188222e71e7ed781fefeb8091e45379e6910bb39fe0b8827ec43cc2da2" exitCode=0 Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.386112 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vqmnb" event={"ID":"57035aac-3298-49ff-990c-06d1c53f1e42","Type":"ContainerDied","Data":"7bf21c188222e71e7ed781fefeb8091e45379e6910bb39fe0b8827ec43cc2da2"} Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.396111 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" event={"ID":"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e","Type":"ContainerStarted","Data":"92ceb89c260397e716d48dc32b706fb76b78bf2bc6b694da0c621261024986dd"} Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.396153 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" event={"ID":"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e","Type":"ContainerStarted","Data":"4eea9a01aabdb32df16923f3bf1383567a5c5a05c97df009b46b2e609992a6e6"} Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.399643 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 
14:21:29.404618 4948 patch_prober.go:28] interesting pod/router-default-5444994796-xgx8j container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 14:21:29 crc kubenswrapper[4948]: [-]has-synced failed: reason withheld Jan 22 14:21:29 crc kubenswrapper[4948]: [+]process-running ok Jan 22 14:21:29 crc kubenswrapper[4948]: healthz check failed Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.404675 4948 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xgx8j" podUID="cfbebb9e-7111-4271-bff9-e663d96e586e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.405423 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xg5mq\" (UniqueName: \"kubernetes.io/projected/39643a3e-7da1-4e07-a5f0-1c53df079831-kube-api-access-xg5mq\") pod \"redhat-marketplace-kppfw\" (UID: \"39643a3e-7da1-4e07-a5f0-1c53df079831\") " pod="openshift-marketplace/redhat-marketplace-kppfw" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.405486 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39643a3e-7da1-4e07-a5f0-1c53df079831-catalog-content\") pod \"redhat-marketplace-kppfw\" (UID: \"39643a3e-7da1-4e07-a5f0-1c53df079831\") " pod="openshift-marketplace/redhat-marketplace-kppfw" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.405575 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39643a3e-7da1-4e07-a5f0-1c53df079831-utilities\") pod \"redhat-marketplace-kppfw\" (UID: \"39643a3e-7da1-4e07-a5f0-1c53df079831\") " pod="openshift-marketplace/redhat-marketplace-kppfw" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.407452 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39643a3e-7da1-4e07-a5f0-1c53df079831-utilities\") pod \"redhat-marketplace-kppfw\" (UID: \"39643a3e-7da1-4e07-a5f0-1c53df079831\") " pod="openshift-marketplace/redhat-marketplace-kppfw" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.407471 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39643a3e-7da1-4e07-a5f0-1c53df079831-catalog-content\") pod \"redhat-marketplace-kppfw\" (UID: \"39643a3e-7da1-4e07-a5f0-1c53df079831\") " pod="openshift-marketplace/redhat-marketplace-kppfw" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.411900 4948 generic.go:334] "Generic (PLEG): container finished" podID="8cc0468b-0cc6-43b6-b358-72c5cb9d6050" containerID="22a089c023698726e03983bffa00b1b0d43193123dc9b9f6c9cadc49feebe2d6" exitCode=0 Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.412051 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h27pl" event={"ID":"8cc0468b-0cc6-43b6-b358-72c5cb9d6050","Type":"ContainerDied","Data":"22a089c023698726e03983bffa00b1b0d43193123dc9b9f6c9cadc49feebe2d6"} Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.427928 4948 generic.go:334] "Generic (PLEG): container finished" podID="a15930ec-26ea-4d6f-b1fc-2aa252298992" containerID="450027f023ba817857345f3e453e532ad6b3339cf927acd216d7718aa110bc98" 
exitCode=0 Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.428266 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p5mjq" event={"ID":"a15930ec-26ea-4d6f-b1fc-2aa252298992","Type":"ContainerDied","Data":"450027f023ba817857345f3e453e532ad6b3339cf927acd216d7718aa110bc98"} Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.428297 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p5mjq" event={"ID":"a15930ec-26ea-4d6f-b1fc-2aa252298992","Type":"ContainerStarted","Data":"751bda09835693ca05fefb7acd9a67a244f5e8ddffc2c878cafaee85130be123"} Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.455738 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xg5mq\" (UniqueName: \"kubernetes.io/projected/39643a3e-7da1-4e07-a5f0-1c53df079831-kube-api-access-xg5mq\") pod \"redhat-marketplace-kppfw\" (UID: \"39643a3e-7da1-4e07-a5f0-1c53df079831\") " pod="openshift-marketplace/redhat-marketplace-kppfw" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.481064 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" podStartSLOduration=128.481034774 podStartE2EDuration="2m8.481034774s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:29.478393442 +0000 UTC m=+147.808066371" watchObservedRunningTime="2026-01-22 14:21:29.481034774 +0000 UTC m=+147.810707703" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.537973 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wstpq"] Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.554910 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kppfw" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.617180 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.618518 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.623828 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.624181 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.636618 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.669587 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.670141 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.672148 4948 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-52bdf" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.681688 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.720525 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3722b0ae-dab1-4301-b98e-de27b27ab754-config-volume\") pod \"3722b0ae-dab1-4301-b98e-de27b27ab754\" (UID: \"3722b0ae-dab1-4301-b98e-de27b27ab754\") " Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.720592 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3722b0ae-dab1-4301-b98e-de27b27ab754-secret-volume\") pod \"3722b0ae-dab1-4301-b98e-de27b27ab754\" (UID: \"3722b0ae-dab1-4301-b98e-de27b27ab754\") " Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.720694 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bszvw\" (UniqueName: \"kubernetes.io/projected/3722b0ae-dab1-4301-b98e-de27b27ab754-kube-api-access-bszvw\") pod \"3722b0ae-dab1-4301-b98e-de27b27ab754\" (UID: \"3722b0ae-dab1-4301-b98e-de27b27ab754\") " Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.720958 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e261aba5-965d-4345-952b-dccfa38b811d-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e261aba5-965d-4345-952b-dccfa38b811d\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.721046 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e261aba5-965d-4345-952b-dccfa38b811d-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e261aba5-965d-4345-952b-dccfa38b811d\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.722615 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3722b0ae-dab1-4301-b98e-de27b27ab754-config-volume" (OuterVolumeSpecName: "config-volume") pod "3722b0ae-dab1-4301-b98e-de27b27ab754" (UID: "3722b0ae-dab1-4301-b98e-de27b27ab754"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.726533 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3722b0ae-dab1-4301-b98e-de27b27ab754-kube-api-access-bszvw" (OuterVolumeSpecName: "kube-api-access-bszvw") pod "3722b0ae-dab1-4301-b98e-de27b27ab754" (UID: "3722b0ae-dab1-4301-b98e-de27b27ab754"). InnerVolumeSpecName "kube-api-access-bszvw". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.726755 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3722b0ae-dab1-4301-b98e-de27b27ab754-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "3722b0ae-dab1-4301-b98e-de27b27ab754" (UID: "3722b0ae-dab1-4301-b98e-de27b27ab754"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.824305 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e261aba5-965d-4345-952b-dccfa38b811d-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e261aba5-965d-4345-952b-dccfa38b811d\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.825260 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e261aba5-965d-4345-952b-dccfa38b811d-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e261aba5-965d-4345-952b-dccfa38b811d\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.827786 4948 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/3722b0ae-dab1-4301-b98e-de27b27ab754-secret-volume\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.827877 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e261aba5-965d-4345-952b-dccfa38b811d-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"e261aba5-965d-4345-952b-dccfa38b811d\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.827915 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bszvw\" (UniqueName: \"kubernetes.io/projected/3722b0ae-dab1-4301-b98e-de27b27ab754-kube-api-access-bszvw\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.827931 4948 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/3722b0ae-dab1-4301-b98e-de27b27ab754-config-volume\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.844596 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-6wr9b"] Jan 22 14:21:29 crc kubenswrapper[4948]: E0122 14:21:29.845111 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3722b0ae-dab1-4301-b98e-de27b27ab754" containerName="collect-profiles" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.845129 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="3722b0ae-dab1-4301-b98e-de27b27ab754" containerName="collect-profiles" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.845262 4948 memory_manager.go:354] "RemoveStaleState removing state" podUID="3722b0ae-dab1-4301-b98e-de27b27ab754" containerName="collect-profiles" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.846254 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6wr9b" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.849467 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.862060 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6wr9b"] Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.873804 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e261aba5-965d-4345-952b-dccfa38b811d-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"e261aba5-965d-4345-952b-dccfa38b811d\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.951455 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.966907 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.967741 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kppfw"] Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.973353 4948 patch_prober.go:28] interesting pod/downloads-7954f5f757-s4jms container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.973440 4948 patch_prober.go:28] interesting pod/downloads-7954f5f757-s4jms container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" start-of-body= Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.973501 4948 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-s4jms" podUID="a35e6789-77cc-4dd7-acc0-e4ca4ee5cdad" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Jan 22 14:21:29 crc kubenswrapper[4948]: I0122 14:21:29.973428 4948 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-s4jms" podUID="a35e6789-77cc-4dd7-acc0-e4ca4ee5cdad" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.30:8080/\": dial tcp 10.217.0.30:8080: connect: connection refused" Jan 22 14:21:29 crc kubenswrapper[4948]: W0122 14:21:29.979391 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod39643a3e_7da1_4e07_a5f0_1c53df079831.slice/crio-cdb048502199d8b2d4052e8f6b288cc023f929d7977ee1dc58313a236f48dd12 WatchSource:0}: Error finding container cdb048502199d8b2d4052e8f6b288cc023f929d7977ee1dc58313a236f48dd12: Status 404 returned error can't find the container with id cdb048502199d8b2d4052e8f6b288cc023f929d7977ee1dc58313a236f48dd12 Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.030514 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-98fvp\" (UniqueName: \"kubernetes.io/projected/b89355cf-e0e4-4a32-8519-c75e0a13c7ce-kube-api-access-98fvp\") pod \"redhat-operators-6wr9b\" (UID: \"b89355cf-e0e4-4a32-8519-c75e0a13c7ce\") " pod="openshift-marketplace/redhat-operators-6wr9b" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.030621 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.030653 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b89355cf-e0e4-4a32-8519-c75e0a13c7ce-utilities\") pod \"redhat-operators-6wr9b\" (UID: \"b89355cf-e0e4-4a32-8519-c75e0a13c7ce\") " pod="openshift-marketplace/redhat-operators-6wr9b" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.030711 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.030747 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b89355cf-e0e4-4a32-8519-c75e0a13c7ce-catalog-content\") pod \"redhat-operators-6wr9b\" (UID: \"b89355cf-e0e4-4a32-8519-c75e0a13c7ce\") " pod="openshift-marketplace/redhat-operators-6wr9b" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.030785 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.030823 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.034767 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.036274 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod 
\"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.037358 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.132773 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-98fvp\" (UniqueName: \"kubernetes.io/projected/b89355cf-e0e4-4a32-8519-c75e0a13c7ce-kube-api-access-98fvp\") pod \"redhat-operators-6wr9b\" (UID: \"b89355cf-e0e4-4a32-8519-c75e0a13c7ce\") " pod="openshift-marketplace/redhat-operators-6wr9b" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.132833 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b89355cf-e0e4-4a32-8519-c75e0a13c7ce-utilities\") pod \"redhat-operators-6wr9b\" (UID: \"b89355cf-e0e4-4a32-8519-c75e0a13c7ce\") " pod="openshift-marketplace/redhat-operators-6wr9b" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.133650 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b89355cf-e0e4-4a32-8519-c75e0a13c7ce-catalog-content\") pod \"redhat-operators-6wr9b\" (UID: \"b89355cf-e0e4-4a32-8519-c75e0a13c7ce\") " pod="openshift-marketplace/redhat-operators-6wr9b" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.134653 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b89355cf-e0e4-4a32-8519-c75e0a13c7ce-utilities\") pod \"redhat-operators-6wr9b\" (UID: \"b89355cf-e0e4-4a32-8519-c75e0a13c7ce\") " pod="openshift-marketplace/redhat-operators-6wr9b" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.134791 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b89355cf-e0e4-4a32-8519-c75e0a13c7ce-catalog-content\") pod \"redhat-operators-6wr9b\" (UID: \"b89355cf-e0e4-4a32-8519-c75e0a13c7ce\") " pod="openshift-marketplace/redhat-operators-6wr9b" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.153625 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-98fvp\" (UniqueName: \"kubernetes.io/projected/b89355cf-e0e4-4a32-8519-c75e0a13c7ce-kube-api-access-98fvp\") pod \"redhat-operators-6wr9b\" (UID: \"b89355cf-e0e4-4a32-8519-c75e0a13c7ce\") " pod="openshift-marketplace/redhat-operators-6wr9b" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.187743 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.191642 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.198187 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.205303 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6wr9b" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.210606 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.213826 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.226397 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-z4wls"] Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.227894 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z4wls" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.234805 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62e1b4fe-576f-48ed-a7ea-a94dfd1af206-catalog-content\") pod \"redhat-operators-z4wls\" (UID: \"62e1b4fe-576f-48ed-a7ea-a94dfd1af206\") " pod="openshift-marketplace/redhat-operators-z4wls" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.234886 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62e1b4fe-576f-48ed-a7ea-a94dfd1af206-utilities\") pod \"redhat-operators-z4wls\" (UID: \"62e1b4fe-576f-48ed-a7ea-a94dfd1af206\") " pod="openshift-marketplace/redhat-operators-z4wls" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.234911 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65dg7\" (UniqueName: \"kubernetes.io/projected/62e1b4fe-576f-48ed-a7ea-a94dfd1af206-kube-api-access-65dg7\") pod \"redhat-operators-z4wls\" (UID: \"62e1b4fe-576f-48ed-a7ea-a94dfd1af206\") " pod="openshift-marketplace/redhat-operators-z4wls" Jan 22 14:21:30 crc kubenswrapper[4948]: W0122 14:21:30.237161 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pode261aba5_965d_4345_952b_dccfa38b811d.slice/crio-eebc4784db21c80b5261f4b8229bfc6e5a672c06db52dd681adfb4b9fc03d419 WatchSource:0}: Error finding container eebc4784db21c80b5261f4b8229bfc6e5a672c06db52dd681adfb4b9fc03d419: Status 404 returned error can't find the container with id eebc4784db21c80b5261f4b8229bfc6e5a672c06db52dd681adfb4b9fc03d419 Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.238059 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-marketplace/redhat-operators-z4wls"] Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.336477 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62e1b4fe-576f-48ed-a7ea-a94dfd1af206-utilities\") pod \"redhat-operators-z4wls\" (UID: \"62e1b4fe-576f-48ed-a7ea-a94dfd1af206\") " pod="openshift-marketplace/redhat-operators-z4wls" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.336526 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65dg7\" (UniqueName: \"kubernetes.io/projected/62e1b4fe-576f-48ed-a7ea-a94dfd1af206-kube-api-access-65dg7\") pod \"redhat-operators-z4wls\" (UID: \"62e1b4fe-576f-48ed-a7ea-a94dfd1af206\") " pod="openshift-marketplace/redhat-operators-z4wls" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.336586 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62e1b4fe-576f-48ed-a7ea-a94dfd1af206-catalog-content\") pod \"redhat-operators-z4wls\" (UID: \"62e1b4fe-576f-48ed-a7ea-a94dfd1af206\") " pod="openshift-marketplace/redhat-operators-z4wls" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.337281 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62e1b4fe-576f-48ed-a7ea-a94dfd1af206-utilities\") pod \"redhat-operators-z4wls\" (UID: \"62e1b4fe-576f-48ed-a7ea-a94dfd1af206\") " pod="openshift-marketplace/redhat-operators-z4wls" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.337308 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62e1b4fe-576f-48ed-a7ea-a94dfd1af206-catalog-content\") pod \"redhat-operators-z4wls\" (UID: \"62e1b4fe-576f-48ed-a7ea-a94dfd1af206\") " pod="openshift-marketplace/redhat-operators-z4wls" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.356071 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65dg7\" (UniqueName: \"kubernetes.io/projected/62e1b4fe-576f-48ed-a7ea-a94dfd1af206-kube-api-access-65dg7\") pod \"redhat-operators-z4wls\" (UID: \"62e1b4fe-576f-48ed-a7ea-a94dfd1af206\") " pod="openshift-marketplace/redhat-operators-z4wls" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.380522 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-xgx8j" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.395192 4948 patch_prober.go:28] interesting pod/router-default-5444994796-xgx8j container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 14:21:30 crc kubenswrapper[4948]: [-]has-synced failed: reason withheld Jan 22 14:21:30 crc kubenswrapper[4948]: [+]process-running ok Jan 22 14:21:30 crc kubenswrapper[4948]: healthz check failed Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.396823 4948 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xgx8j" podUID="cfbebb9e-7111-4271-bff9-e663d96e586e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.469669 4948 generic.go:334] "Generic (PLEG): container finished" 
podID="5a4d2a63-38c9-4cd6-b02a-06c27f128cd0" containerID="fe957e6254bb35a97b2646b9f40ed8ce3275eac990c2cf5cbe36d40cf881576b" exitCode=0 Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.469810 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wstpq" event={"ID":"5a4d2a63-38c9-4cd6-b02a-06c27f128cd0","Type":"ContainerDied","Data":"fe957e6254bb35a97b2646b9f40ed8ce3275eac990c2cf5cbe36d40cf881576b"} Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.469855 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wstpq" event={"ID":"5a4d2a63-38c9-4cd6-b02a-06c27f128cd0","Type":"ContainerStarted","Data":"2deb4b4b8c03459390c225d8d2dac13c749130a09079c33ddf7742995586dc8f"} Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.502696 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-52bdf" event={"ID":"3722b0ae-dab1-4301-b98e-de27b27ab754","Type":"ContainerDied","Data":"9f2b91317b674fcf0e91ce0236e63eb1cae4c010dc197082f27eee58d69f56fe"} Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.502743 4948 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9f2b91317b674fcf0e91ce0236e63eb1cae4c010dc197082f27eee58d69f56fe" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.502808 4948 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29484855-52bdf" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.519634 4948 generic.go:334] "Generic (PLEG): container finished" podID="39643a3e-7da1-4e07-a5f0-1c53df079831" containerID="fb44e493f473531da958e4d3697468f3c1f1c9be95ca622a92c806fd949955f1" exitCode=0 Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.520193 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kppfw" event={"ID":"39643a3e-7da1-4e07-a5f0-1c53df079831","Type":"ContainerDied","Data":"fb44e493f473531da958e4d3697468f3c1f1c9be95ca622a92c806fd949955f1"} Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.520247 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kppfw" event={"ID":"39643a3e-7da1-4e07-a5f0-1c53df079831","Type":"ContainerStarted","Data":"cdb048502199d8b2d4052e8f6b288cc023f929d7977ee1dc58313a236f48dd12"} Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.530416 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"e261aba5-965d-4345-952b-dccfa38b811d","Type":"ContainerStarted","Data":"eebc4784db21c80b5261f4b8229bfc6e5a672c06db52dd681adfb4b9fc03d419"} Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.542745 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-v7qjh" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.546833 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-lkpqv" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.550996 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-z4wls" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.882734 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-6wr9b"] Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.961874 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-7kdkq" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.961914 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-7kdkq" Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.974657 4948 patch_prober.go:28] interesting pod/console-f9d7485db-7kdkq container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.41:8443/health\": dial tcp 10.217.0.41:8443: connect: connection refused" start-of-body= Jan 22 14:21:30 crc kubenswrapper[4948]: I0122 14:21:30.974709 4948 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-7kdkq" podUID="5defb8aa-bd1f-43d9-a86d-0026afa4f8d1" containerName="console" probeResult="failure" output="Get \"https://10.217.0.41:8443/health\": dial tcp 10.217.0.41:8443: connect: connection refused" Jan 22 14:21:31 crc kubenswrapper[4948]: I0122 14:21:31.249330 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-z4wls"] Jan 22 14:21:31 crc kubenswrapper[4948]: I0122 14:21:31.383133 4948 patch_prober.go:28] interesting pod/router-default-5444994796-xgx8j container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Jan 22 14:21:31 crc kubenswrapper[4948]: [-]has-synced failed: reason withheld Jan 22 14:21:31 crc kubenswrapper[4948]: [+]process-running ok Jan 22 14:21:31 crc kubenswrapper[4948]: healthz check failed Jan 22 14:21:31 crc kubenswrapper[4948]: I0122 14:21:31.383232 4948 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-xgx8j" podUID="cfbebb9e-7111-4271-bff9-e663d96e586e" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Jan 22 14:21:31 crc kubenswrapper[4948]: I0122 14:21:31.584715 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"e261aba5-965d-4345-952b-dccfa38b811d","Type":"ContainerStarted","Data":"a0ea7582515155a1fa1551239e48850df8ce85700504ab119f6f95349ee1686a"} Jan 22 14:21:31 crc kubenswrapper[4948]: I0122 14:21:31.598602 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6wr9b" event={"ID":"b89355cf-e0e4-4a32-8519-c75e0a13c7ce","Type":"ContainerStarted","Data":"486e8ad3e3363189171a1fae498dac8cd5afab51cba85c8407f3f35da89cba9a"} Jan 22 14:21:31 crc kubenswrapper[4948]: I0122 14:21:31.598849 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6wr9b" event={"ID":"b89355cf-e0e4-4a32-8519-c75e0a13c7ce","Type":"ContainerStarted","Data":"27085b361928952d183866a950fd73e38fcdc7a2d807eba701cc5439f2b30154"} Jan 22 14:21:31 crc kubenswrapper[4948]: I0122 14:21:31.600284 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 22 14:21:31 crc kubenswrapper[4948]: I0122 14:21:31.601464 4948 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 14:21:31 crc kubenswrapper[4948]: I0122 14:21:31.605960 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 22 14:21:31 crc kubenswrapper[4948]: I0122 14:21:31.606802 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 22 14:21:31 crc kubenswrapper[4948]: I0122 14:21:31.608303 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"dfa283f2d1cddb2dcfcdf4ceb60696111b6d75ab49cc9cd6e0a0b2eb159d90c6"} Jan 22 14:21:31 crc kubenswrapper[4948]: I0122 14:21:31.610609 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 22 14:21:31 crc kubenswrapper[4948]: I0122 14:21:31.614925 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=2.61490118 podStartE2EDuration="2.61490118s" podCreationTimestamp="2026-01-22 14:21:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:31.613748608 +0000 UTC m=+149.943421537" watchObservedRunningTime="2026-01-22 14:21:31.61490118 +0000 UTC m=+149.944574109" Jan 22 14:21:31 crc kubenswrapper[4948]: I0122 14:21:31.641322 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"43c6418d9ae3d12354fa1430273d9e505c81eeb62ec4ac3c6482bb71113e5c8d"} Jan 22 14:21:31 crc kubenswrapper[4948]: I0122 14:21:31.641415 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"54555bfcb29465c7f6576d4ee0047910147bbdd5cbd5cd2214053f6adda49b0d"} Jan 22 14:21:31 crc kubenswrapper[4948]: I0122 14:21:31.641428 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z4wls" event={"ID":"62e1b4fe-576f-48ed-a7ea-a94dfd1af206","Type":"ContainerStarted","Data":"3f9c41f4a5ee292c95a74bd69aaab9b5e1c4fb2006d74a456f71e7df7fee6e88"} Jan 22 14:21:31 crc kubenswrapper[4948]: I0122 14:21:31.641442 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"32175ced82f1f9d5fccc6791e85e256244691d0b74cd0009b512137419e003ec"} Jan 22 14:21:31 crc kubenswrapper[4948]: I0122 14:21:31.641454 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"e961ecaaa7d1dc8c71da32fcd9c159f33f7a3400d2e436fa2cda08290bec97a2"} Jan 22 14:21:31 crc kubenswrapper[4948]: I0122 14:21:31.643922 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:21:31 crc kubenswrapper[4948]: I0122 14:21:31.699608 4948 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/458cd69f-cb80-4164-a080-990094b74726-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"458cd69f-cb80-4164-a080-990094b74726\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 14:21:31 crc kubenswrapper[4948]: I0122 14:21:31.699696 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/458cd69f-cb80-4164-a080-990094b74726-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"458cd69f-cb80-4164-a080-990094b74726\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 14:21:31 crc kubenswrapper[4948]: I0122 14:21:31.804278 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/458cd69f-cb80-4164-a080-990094b74726-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"458cd69f-cb80-4164-a080-990094b74726\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 14:21:31 crc kubenswrapper[4948]: I0122 14:21:31.804462 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/458cd69f-cb80-4164-a080-990094b74726-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"458cd69f-cb80-4164-a080-990094b74726\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 14:21:31 crc kubenswrapper[4948]: I0122 14:21:31.805440 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/458cd69f-cb80-4164-a080-990094b74726-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"458cd69f-cb80-4164-a080-990094b74726\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 14:21:31 crc kubenswrapper[4948]: I0122 14:21:31.839673 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/458cd69f-cb80-4164-a080-990094b74726-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"458cd69f-cb80-4164-a080-990094b74726\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 14:21:32 crc kubenswrapper[4948]: I0122 14:21:32.005609 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 14:21:32 crc kubenswrapper[4948]: I0122 14:21:32.386182 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-xgx8j" Jan 22 14:21:32 crc kubenswrapper[4948]: I0122 14:21:32.390500 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-xgx8j" Jan 22 14:21:32 crc kubenswrapper[4948]: I0122 14:21:32.661054 4948 generic.go:334] "Generic (PLEG): container finished" podID="e261aba5-965d-4345-952b-dccfa38b811d" containerID="a0ea7582515155a1fa1551239e48850df8ce85700504ab119f6f95349ee1686a" exitCode=0 Jan 22 14:21:32 crc kubenswrapper[4948]: I0122 14:21:32.661139 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"e261aba5-965d-4345-952b-dccfa38b811d","Type":"ContainerDied","Data":"a0ea7582515155a1fa1551239e48850df8ce85700504ab119f6f95349ee1686a"} Jan 22 14:21:32 crc kubenswrapper[4948]: I0122 14:21:32.679141 4948 generic.go:334] "Generic (PLEG): container finished" podID="b89355cf-e0e4-4a32-8519-c75e0a13c7ce" containerID="486e8ad3e3363189171a1fae498dac8cd5afab51cba85c8407f3f35da89cba9a" exitCode=0 Jan 22 14:21:32 crc kubenswrapper[4948]: I0122 14:21:32.679228 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6wr9b" event={"ID":"b89355cf-e0e4-4a32-8519-c75e0a13c7ce","Type":"ContainerDied","Data":"486e8ad3e3363189171a1fae498dac8cd5afab51cba85c8407f3f35da89cba9a"} Jan 22 14:21:32 crc kubenswrapper[4948]: I0122 14:21:32.681089 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"58045dd71233a2b1e0d09c130850f0d3dbf49bb34e82fab07de9b616d268fede"} Jan 22 14:21:32 crc kubenswrapper[4948]: I0122 14:21:32.695865 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Jan 22 14:21:32 crc kubenswrapper[4948]: I0122 14:21:32.697536 4948 generic.go:334] "Generic (PLEG): container finished" podID="62e1b4fe-576f-48ed-a7ea-a94dfd1af206" containerID="ac7269cf52cf994b5390189192e1eddda1b8fbd17d51db1eb1ee97d360bce348" exitCode=0 Jan 22 14:21:32 crc kubenswrapper[4948]: I0122 14:21:32.698565 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z4wls" event={"ID":"62e1b4fe-576f-48ed-a7ea-a94dfd1af206","Type":"ContainerDied","Data":"ac7269cf52cf994b5390189192e1eddda1b8fbd17d51db1eb1ee97d360bce348"} Jan 22 14:21:32 crc kubenswrapper[4948]: W0122 14:21:32.792431 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod458cd69f_cb80_4164_a080_990094b74726.slice/crio-b9215a4c35d12881cb562df1d50df19677e1155b64158925abcaa13ee3e2a9f8 WatchSource:0}: Error finding container b9215a4c35d12881cb562df1d50df19677e1155b64158925abcaa13ee3e2a9f8: Status 404 returned error can't find the container with id b9215a4c35d12881cb562df1d50df19677e1155b64158925abcaa13ee3e2a9f8 Jan 22 14:21:33 crc kubenswrapper[4948]: I0122 14:21:33.722290 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"458cd69f-cb80-4164-a080-990094b74726","Type":"ContainerStarted","Data":"cf9e7aa9419e95fbe76fe3ff5c22e72c25e06223313e090599f630b5105484cc"} Jan 22 
14:21:33 crc kubenswrapper[4948]: I0122 14:21:33.722707 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"458cd69f-cb80-4164-a080-990094b74726","Type":"ContainerStarted","Data":"b9215a4c35d12881cb562df1d50df19677e1155b64158925abcaa13ee3e2a9f8"} Jan 22 14:21:33 crc kubenswrapper[4948]: I0122 14:21:33.740419 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=2.740398048 podStartE2EDuration="2.740398048s" podCreationTimestamp="2026-01-22 14:21:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:21:33.738232398 +0000 UTC m=+152.067905327" watchObservedRunningTime="2026-01-22 14:21:33.740398048 +0000 UTC m=+152.070070967" Jan 22 14:21:34 crc kubenswrapper[4948]: I0122 14:21:34.187147 4948 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 14:21:34 crc kubenswrapper[4948]: I0122 14:21:34.357343 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e261aba5-965d-4345-952b-dccfa38b811d-kubelet-dir\") pod \"e261aba5-965d-4345-952b-dccfa38b811d\" (UID: \"e261aba5-965d-4345-952b-dccfa38b811d\") " Jan 22 14:21:34 crc kubenswrapper[4948]: I0122 14:21:34.357439 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e261aba5-965d-4345-952b-dccfa38b811d-kube-api-access\") pod \"e261aba5-965d-4345-952b-dccfa38b811d\" (UID: \"e261aba5-965d-4345-952b-dccfa38b811d\") " Jan 22 14:21:34 crc kubenswrapper[4948]: I0122 14:21:34.357516 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e261aba5-965d-4345-952b-dccfa38b811d-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "e261aba5-965d-4345-952b-dccfa38b811d" (UID: "e261aba5-965d-4345-952b-dccfa38b811d"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:21:34 crc kubenswrapper[4948]: I0122 14:21:34.357787 4948 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/e261aba5-965d-4345-952b-dccfa38b811d-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:34 crc kubenswrapper[4948]: I0122 14:21:34.387505 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e261aba5-965d-4345-952b-dccfa38b811d-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e261aba5-965d-4345-952b-dccfa38b811d" (UID: "e261aba5-965d-4345-952b-dccfa38b811d"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:21:34 crc kubenswrapper[4948]: I0122 14:21:34.459639 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e261aba5-965d-4345-952b-dccfa38b811d-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:34 crc kubenswrapper[4948]: I0122 14:21:34.738131 4948 generic.go:334] "Generic (PLEG): container finished" podID="458cd69f-cb80-4164-a080-990094b74726" containerID="cf9e7aa9419e95fbe76fe3ff5c22e72c25e06223313e090599f630b5105484cc" exitCode=0 Jan 22 14:21:34 crc kubenswrapper[4948]: I0122 14:21:34.738229 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"458cd69f-cb80-4164-a080-990094b74726","Type":"ContainerDied","Data":"cf9e7aa9419e95fbe76fe3ff5c22e72c25e06223313e090599f630b5105484cc"} Jan 22 14:21:34 crc kubenswrapper[4948]: I0122 14:21:34.747717 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"e261aba5-965d-4345-952b-dccfa38b811d","Type":"ContainerDied","Data":"eebc4784db21c80b5261f4b8229bfc6e5a672c06db52dd681adfb4b9fc03d419"} Jan 22 14:21:34 crc kubenswrapper[4948]: I0122 14:21:34.747768 4948 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eebc4784db21c80b5261f4b8229bfc6e5a672c06db52dd681adfb4b9fc03d419" Jan 22 14:21:34 crc kubenswrapper[4948]: I0122 14:21:34.747782 4948 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Jan 22 14:21:36 crc kubenswrapper[4948]: I0122 14:21:36.014167 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-9svkn" Jan 22 14:21:36 crc kubenswrapper[4948]: I0122 14:21:36.175353 4948 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 14:21:36 crc kubenswrapper[4948]: I0122 14:21:36.299705 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/458cd69f-cb80-4164-a080-990094b74726-kubelet-dir\") pod \"458cd69f-cb80-4164-a080-990094b74726\" (UID: \"458cd69f-cb80-4164-a080-990094b74726\") " Jan 22 14:21:36 crc kubenswrapper[4948]: I0122 14:21:36.299891 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/458cd69f-cb80-4164-a080-990094b74726-kube-api-access\") pod \"458cd69f-cb80-4164-a080-990094b74726\" (UID: \"458cd69f-cb80-4164-a080-990094b74726\") " Jan 22 14:21:36 crc kubenswrapper[4948]: I0122 14:21:36.302090 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/458cd69f-cb80-4164-a080-990094b74726-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "458cd69f-cb80-4164-a080-990094b74726" (UID: "458cd69f-cb80-4164-a080-990094b74726"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:21:36 crc kubenswrapper[4948]: I0122 14:21:36.309813 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/458cd69f-cb80-4164-a080-990094b74726-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "458cd69f-cb80-4164-a080-990094b74726" (UID: "458cd69f-cb80-4164-a080-990094b74726"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:21:36 crc kubenswrapper[4948]: I0122 14:21:36.402534 4948 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/458cd69f-cb80-4164-a080-990094b74726-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:36 crc kubenswrapper[4948]: I0122 14:21:36.402580 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/458cd69f-cb80-4164-a080-990094b74726-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 14:21:36 crc kubenswrapper[4948]: I0122 14:21:36.784001 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"458cd69f-cb80-4164-a080-990094b74726","Type":"ContainerDied","Data":"b9215a4c35d12881cb562df1d50df19677e1155b64158925abcaa13ee3e2a9f8"} Jan 22 14:21:36 crc kubenswrapper[4948]: I0122 14:21:36.784083 4948 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b9215a4c35d12881cb562df1d50df19677e1155b64158925abcaa13ee3e2a9f8" Jan 22 14:21:36 crc kubenswrapper[4948]: I0122 14:21:36.784230 4948 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Jan 22 14:21:39 crc kubenswrapper[4948]: I0122 14:21:39.984093 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-s4jms" Jan 22 14:21:41 crc kubenswrapper[4948]: I0122 14:21:41.036426 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-7kdkq" Jan 22 14:21:41 crc kubenswrapper[4948]: I0122 14:21:41.041256 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-7kdkq" Jan 22 14:21:41 crc kubenswrapper[4948]: I0122 14:21:41.414534 4948 patch_prober.go:28] interesting pod/machine-config-daemon-ldzzt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:21:41 crc kubenswrapper[4948]: I0122 14:21:41.414590 4948 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" podUID="355ee4ab-8609-4b0c-ad77-bff335632d2d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:21:42 crc kubenswrapper[4948]: I0122 14:21:42.833004 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xf6qs"] Jan 22 14:21:42 crc kubenswrapper[4948]: I0122 14:21:42.833680 4948 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-xf6qs" podUID="2314b196-d44a-487b-b9db-e0396b172957" containerName="controller-manager" containerID="cri-o://2117096d8c96d556464c0ecff73e591cc95bd4773ed7028e4445fe14c0fd5642" gracePeriod=30 Jan 22 14:21:42 crc kubenswrapper[4948]: I0122 14:21:42.840937 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm"] Jan 22 14:21:42 crc kubenswrapper[4948]: I0122 14:21:42.841245 4948 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm" podUID="fec867e7-c935-489e-b5e9-70d0cf71413b" containerName="route-controller-manager" containerID="cri-o://eb43f0243d3cfb1b9f60d4e6368f173cdda599c17c5e0080d4876ef6d92b8d20" gracePeriod=30 Jan 22 14:21:43 crc kubenswrapper[4948]: E0122 14:21:43.198725 4948 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2314b196_d44a_487b_b9db_e0396b172957.slice/crio-conmon-2117096d8c96d556464c0ecff73e591cc95bd4773ed7028e4445fe14c0fd5642.scope\": RecentStats: unable to find data in memory cache]" Jan 22 14:21:43 crc kubenswrapper[4948]: I0122 14:21:43.871861 4948 generic.go:334] "Generic (PLEG): container finished" podID="fec867e7-c935-489e-b5e9-70d0cf71413b" containerID="eb43f0243d3cfb1b9f60d4e6368f173cdda599c17c5e0080d4876ef6d92b8d20" exitCode=0 Jan 22 14:21:43 crc kubenswrapper[4948]: I0122 14:21:43.871958 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm" event={"ID":"fec867e7-c935-489e-b5e9-70d0cf71413b","Type":"ContainerDied","Data":"eb43f0243d3cfb1b9f60d4e6368f173cdda599c17c5e0080d4876ef6d92b8d20"} Jan 22 14:21:43 crc kubenswrapper[4948]: I0122 14:21:43.875369 4948 generic.go:334] "Generic (PLEG): container finished" podID="2314b196-d44a-487b-b9db-e0396b172957" containerID="2117096d8c96d556464c0ecff73e591cc95bd4773ed7028e4445fe14c0fd5642" exitCode=0 Jan 22 14:21:43 crc kubenswrapper[4948]: I0122 14:21:43.875408 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-xf6qs" event={"ID":"2314b196-d44a-487b-b9db-e0396b172957","Type":"ContainerDied","Data":"2117096d8c96d556464c0ecff73e591cc95bd4773ed7028e4445fe14c0fd5642"} Jan 22 14:21:44 crc kubenswrapper[4948]: I0122 14:21:44.957435 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1280bb23-c283-4970-a78b-6de7854469ac-metrics-certs\") pod \"network-metrics-daemon-fxccj\" (UID: \"1280bb23-c283-4970-a78b-6de7854469ac\") " pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:21:44 crc kubenswrapper[4948]: I0122 14:21:44.977849 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/1280bb23-c283-4970-a78b-6de7854469ac-metrics-certs\") pod \"network-metrics-daemon-fxccj\" (UID: \"1280bb23-c283-4970-a78b-6de7854469ac\") " pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:21:45 crc kubenswrapper[4948]: I0122 14:21:45.228031 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-fxccj" Jan 22 14:21:48 crc kubenswrapper[4948]: I0122 14:21:48.432901 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:21:49 crc kubenswrapper[4948]: I0122 14:21:49.652084 4948 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-xf6qs container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.26:8443/healthz\": dial tcp 10.217.0.26:8443: connect: connection refused" start-of-body= Jan 22 14:21:49 crc kubenswrapper[4948]: I0122 14:21:49.652144 4948 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-xf6qs" podUID="2314b196-d44a-487b-b9db-e0396b172957" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.26:8443/healthz\": dial tcp 10.217.0.26:8443: connect: connection refused" Jan 22 14:21:50 crc kubenswrapper[4948]: I0122 14:21:50.941860 4948 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-6p6lm container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused" start-of-body= Jan 22 14:21:50 crc kubenswrapper[4948]: I0122 14:21:50.941950 4948 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm" podUID="fec867e7-c935-489e-b5e9-70d0cf71413b" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.35:8443/healthz\": dial tcp 10.217.0.35:8443: connect: connection refused" Jan 22 14:22:00 crc kubenswrapper[4948]: I0122 14:22:00.017559 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-h9sz7" Jan 22 14:22:00 crc kubenswrapper[4948]: I0122 14:22:00.651804 4948 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-xf6qs container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.26:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 22 14:22:00 crc kubenswrapper[4948]: I0122 14:22:00.651910 4948 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-xf6qs" podUID="2314b196-d44a-487b-b9db-e0396b172957" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.26:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 22 14:22:01 crc kubenswrapper[4948]: I0122 14:22:01.942078 4948 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-6p6lm container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.35:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Jan 22 14:22:01 crc kubenswrapper[4948]: I0122 14:22:01.942385 4948 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm" 
podUID="fec867e7-c935-489e-b5e9-70d0cf71413b" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.35:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Jan 22 14:22:02 crc kubenswrapper[4948]: E0122 14:22:02.511122 4948 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 22 14:22:02 crc kubenswrapper[4948]: E0122 14:22:02.511499 4948 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-fvnj2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-mkngx_openshift-marketplace(57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 14:22:02 crc kubenswrapper[4948]: E0122 14:22:02.513936 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-mkngx" podUID="57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce" Jan 22 14:22:04 crc kubenswrapper[4948]: E0122 14:22:04.056343 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-mkngx" podUID="57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce" Jan 22 14:22:04 crc kubenswrapper[4948]: E0122 14:22:04.145400 4948 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" 
image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 22 14:22:04 crc kubenswrapper[4948]: E0122 14:22:04.145595 4948 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-9mpgl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-h27pl_openshift-marketplace(8cc0468b-0cc6-43b6-b358-72c5cb9d6050): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 14:22:04 crc kubenswrapper[4948]: E0122 14:22:04.148048 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-h27pl" podUID="8cc0468b-0cc6-43b6-b358-72c5cb9d6050" Jan 22 14:22:04 crc kubenswrapper[4948]: E0122 14:22:04.154551 4948 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Jan 22 14:22:04 crc kubenswrapper[4948]: E0122 14:22:04.154747 4948 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pklq5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-vqmnb_openshift-marketplace(57035aac-3298-49ff-990c-06d1c53f1e42): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 14:22:04 crc kubenswrapper[4948]: E0122 14:22:04.156650 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-vqmnb" podUID="57035aac-3298-49ff-990c-06d1c53f1e42" Jan 22 14:22:05 crc kubenswrapper[4948]: E0122 14:22:05.187904 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-h27pl" podUID="8cc0468b-0cc6-43b6-b358-72c5cb9d6050" Jan 22 14:22:05 crc kubenswrapper[4948]: E0122 14:22:05.187920 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-vqmnb" podUID="57035aac-3298-49ff-990c-06d1c53f1e42" Jan 22 14:22:05 crc kubenswrapper[4948]: E0122 14:22:05.253331 4948 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 22 14:22:05 crc kubenswrapper[4948]: E0122 14:22:05.253495 4948 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-db5bw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-wstpq_openshift-marketplace(5a4d2a63-38c9-4cd6-b02a-06c27f128cd0): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 14:22:05 crc kubenswrapper[4948]: E0122 14:22:05.254664 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-wstpq" podUID="5a4d2a63-38c9-4cd6-b02a-06c27f128cd0" Jan 22 14:22:05 crc kubenswrapper[4948]: E0122 14:22:05.275607 4948 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Jan 22 14:22:05 crc kubenswrapper[4948]: E0122 14:22:05.275766 4948 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pgq9g,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-p5mjq_openshift-marketplace(a15930ec-26ea-4d6f-b1fc-2aa252298992): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 14:22:05 crc kubenswrapper[4948]: E0122 14:22:05.277794 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-p5mjq" podUID="a15930ec-26ea-4d6f-b1fc-2aa252298992" Jan 22 14:22:08 crc kubenswrapper[4948]: E0122 14:22:08.616995 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-p5mjq" podUID="a15930ec-26ea-4d6f-b1fc-2aa252298992" Jan 22 14:22:08 crc kubenswrapper[4948]: E0122 14:22:08.618355 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-wstpq" podUID="5a4d2a63-38c9-4cd6-b02a-06c27f128cd0" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.687805 4948 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.691858 4948 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-xf6qs" Jan 22 14:22:08 crc kubenswrapper[4948]: E0122 14:22:08.721733 4948 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Jan 22 14:22:08 crc kubenswrapper[4948]: E0122 14:22:08.721939 4948 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xg5mq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-kppfw_openshift-marketplace(39643a3e-7da1-4e07-a5f0-1c53df079831): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 14:22:08 crc kubenswrapper[4948]: E0122 14:22:08.723106 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-kppfw" podUID="39643a3e-7da1-4e07-a5f0-1c53df079831" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.742661 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-577dcd5dc8-ccsvt"] Jan 22 14:22:08 crc kubenswrapper[4948]: E0122 14:22:08.742982 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2314b196-d44a-487b-b9db-e0396b172957" containerName="controller-manager" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.742998 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="2314b196-d44a-487b-b9db-e0396b172957" containerName="controller-manager" Jan 22 14:22:08 crc kubenswrapper[4948]: E0122 14:22:08.743038 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e261aba5-965d-4345-952b-dccfa38b811d" containerName="pruner" Jan 22 14:22:08 crc 
kubenswrapper[4948]: I0122 14:22:08.743047 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="e261aba5-965d-4345-952b-dccfa38b811d" containerName="pruner" Jan 22 14:22:08 crc kubenswrapper[4948]: E0122 14:22:08.743061 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="458cd69f-cb80-4164-a080-990094b74726" containerName="pruner" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.743070 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="458cd69f-cb80-4164-a080-990094b74726" containerName="pruner" Jan 22 14:22:08 crc kubenswrapper[4948]: E0122 14:22:08.743079 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fec867e7-c935-489e-b5e9-70d0cf71413b" containerName="route-controller-manager" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.743090 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="fec867e7-c935-489e-b5e9-70d0cf71413b" containerName="route-controller-manager" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.743230 4948 memory_manager.go:354] "RemoveStaleState removing state" podUID="458cd69f-cb80-4164-a080-990094b74726" containerName="pruner" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.743246 4948 memory_manager.go:354] "RemoveStaleState removing state" podUID="e261aba5-965d-4345-952b-dccfa38b811d" containerName="pruner" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.743259 4948 memory_manager.go:354] "RemoveStaleState removing state" podUID="fec867e7-c935-489e-b5e9-70d0cf71413b" containerName="route-controller-manager" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.743271 4948 memory_manager.go:354] "RemoveStaleState removing state" podUID="2314b196-d44a-487b-b9db-e0396b172957" containerName="controller-manager" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.743837 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-577dcd5dc8-ccsvt" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.762766 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-577dcd5dc8-ccsvt"] Jan 22 14:22:08 crc kubenswrapper[4948]: E0122 14:22:08.766849 4948 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 22 14:22:08 crc kubenswrapper[4948]: E0122 14:22:08.767040 4948 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-98fvp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-6wr9b_openshift-marketplace(b89355cf-e0e4-4a32-8519-c75e0a13c7ce): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 14:22:08 crc kubenswrapper[4948]: E0122 14:22:08.768957 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-6wr9b" podUID="b89355cf-e0e4-4a32-8519-c75e0a13c7ce" Jan 22 14:22:08 crc kubenswrapper[4948]: E0122 14:22:08.770518 4948 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Jan 22 14:22:08 crc kubenswrapper[4948]: E0122 14:22:08.770626 4948 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog 
--cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-65dg7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-z4wls_openshift-marketplace(62e1b4fe-576f-48ed-a7ea-a94dfd1af206): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Jan 22 14:22:08 crc kubenswrapper[4948]: E0122 14:22:08.772609 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-z4wls" podUID="62e1b4fe-576f-48ed-a7ea-a94dfd1af206" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.873636 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5llx\" (UniqueName: \"kubernetes.io/projected/fec867e7-c935-489e-b5e9-70d0cf71413b-kube-api-access-k5llx\") pod \"fec867e7-c935-489e-b5e9-70d0cf71413b\" (UID: \"fec867e7-c935-489e-b5e9-70d0cf71413b\") " Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.873769 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2314b196-d44a-487b-b9db-e0396b172957-serving-cert\") pod \"2314b196-d44a-487b-b9db-e0396b172957\" (UID: \"2314b196-d44a-487b-b9db-e0396b172957\") " Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.873813 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-27wzl\" (UniqueName: \"kubernetes.io/projected/2314b196-d44a-487b-b9db-e0396b172957-kube-api-access-27wzl\") pod \"2314b196-d44a-487b-b9db-e0396b172957\" (UID: \"2314b196-d44a-487b-b9db-e0396b172957\") " Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.873874 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fec867e7-c935-489e-b5e9-70d0cf71413b-config\") pod \"fec867e7-c935-489e-b5e9-70d0cf71413b\" (UID: \"fec867e7-c935-489e-b5e9-70d0cf71413b\") " Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.873927 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2314b196-d44a-487b-b9db-e0396b172957-client-ca\") pod \"2314b196-d44a-487b-b9db-e0396b172957\" (UID: \"2314b196-d44a-487b-b9db-e0396b172957\") " Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.873963 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2314b196-d44a-487b-b9db-e0396b172957-config\") pod \"2314b196-d44a-487b-b9db-e0396b172957\" (UID: \"2314b196-d44a-487b-b9db-e0396b172957\") " Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.874055 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2314b196-d44a-487b-b9db-e0396b172957-proxy-ca-bundles\") pod \"2314b196-d44a-487b-b9db-e0396b172957\" (UID: \"2314b196-d44a-487b-b9db-e0396b172957\") " Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.874078 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fec867e7-c935-489e-b5e9-70d0cf71413b-serving-cert\") pod \"fec867e7-c935-489e-b5e9-70d0cf71413b\" (UID: \"fec867e7-c935-489e-b5e9-70d0cf71413b\") " Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.874097 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fec867e7-c935-489e-b5e9-70d0cf71413b-client-ca\") pod \"fec867e7-c935-489e-b5e9-70d0cf71413b\" (UID: \"fec867e7-c935-489e-b5e9-70d0cf71413b\") " Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.874364 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d-config\") pod \"route-controller-manager-577dcd5dc8-ccsvt\" (UID: \"ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d\") " pod="openshift-route-controller-manager/route-controller-manager-577dcd5dc8-ccsvt" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.874398 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d-client-ca\") pod \"route-controller-manager-577dcd5dc8-ccsvt\" (UID: \"ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d\") " pod="openshift-route-controller-manager/route-controller-manager-577dcd5dc8-ccsvt" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.874424 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d-serving-cert\") pod \"route-controller-manager-577dcd5dc8-ccsvt\" (UID: \"ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d\") " pod="openshift-route-controller-manager/route-controller-manager-577dcd5dc8-ccsvt" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.874450 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qglcw\" (UniqueName: \"kubernetes.io/projected/ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d-kube-api-access-qglcw\") pod \"route-controller-manager-577dcd5dc8-ccsvt\" (UID: \"ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d\") " pod="openshift-route-controller-manager/route-controller-manager-577dcd5dc8-ccsvt" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.874898 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/configmap/2314b196-d44a-487b-b9db-e0396b172957-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "2314b196-d44a-487b-b9db-e0396b172957" (UID: "2314b196-d44a-487b-b9db-e0396b172957"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.875003 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fec867e7-c935-489e-b5e9-70d0cf71413b-config" (OuterVolumeSpecName: "config") pod "fec867e7-c935-489e-b5e9-70d0cf71413b" (UID: "fec867e7-c935-489e-b5e9-70d0cf71413b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.875055 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2314b196-d44a-487b-b9db-e0396b172957-config" (OuterVolumeSpecName: "config") pod "2314b196-d44a-487b-b9db-e0396b172957" (UID: "2314b196-d44a-487b-b9db-e0396b172957"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.875418 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2314b196-d44a-487b-b9db-e0396b172957-client-ca" (OuterVolumeSpecName: "client-ca") pod "2314b196-d44a-487b-b9db-e0396b172957" (UID: "2314b196-d44a-487b-b9db-e0396b172957"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.875604 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fec867e7-c935-489e-b5e9-70d0cf71413b-client-ca" (OuterVolumeSpecName: "client-ca") pod "fec867e7-c935-489e-b5e9-70d0cf71413b" (UID: "fec867e7-c935-489e-b5e9-70d0cf71413b"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.887707 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fec867e7-c935-489e-b5e9-70d0cf71413b-kube-api-access-k5llx" (OuterVolumeSpecName: "kube-api-access-k5llx") pod "fec867e7-c935-489e-b5e9-70d0cf71413b" (UID: "fec867e7-c935-489e-b5e9-70d0cf71413b"). InnerVolumeSpecName "kube-api-access-k5llx". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.888787 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2314b196-d44a-487b-b9db-e0396b172957-kube-api-access-27wzl" (OuterVolumeSpecName: "kube-api-access-27wzl") pod "2314b196-d44a-487b-b9db-e0396b172957" (UID: "2314b196-d44a-487b-b9db-e0396b172957"). InnerVolumeSpecName "kube-api-access-27wzl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.888849 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2314b196-d44a-487b-b9db-e0396b172957-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "2314b196-d44a-487b-b9db-e0396b172957" (UID: "2314b196-d44a-487b-b9db-e0396b172957"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.888862 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fec867e7-c935-489e-b5e9-70d0cf71413b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "fec867e7-c935-489e-b5e9-70d0cf71413b" (UID: "fec867e7-c935-489e-b5e9-70d0cf71413b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.975873 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d-config\") pod \"route-controller-manager-577dcd5dc8-ccsvt\" (UID: \"ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d\") " pod="openshift-route-controller-manager/route-controller-manager-577dcd5dc8-ccsvt" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.975979 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d-client-ca\") pod \"route-controller-manager-577dcd5dc8-ccsvt\" (UID: \"ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d\") " pod="openshift-route-controller-manager/route-controller-manager-577dcd5dc8-ccsvt" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.976120 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d-serving-cert\") pod \"route-controller-manager-577dcd5dc8-ccsvt\" (UID: \"ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d\") " pod="openshift-route-controller-manager/route-controller-manager-577dcd5dc8-ccsvt" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.976160 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qglcw\" (UniqueName: \"kubernetes.io/projected/ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d-kube-api-access-qglcw\") pod \"route-controller-manager-577dcd5dc8-ccsvt\" (UID: \"ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d\") " pod="openshift-route-controller-manager/route-controller-manager-577dcd5dc8-ccsvt" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.976263 4948 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/2314b196-d44a-487b-b9db-e0396b172957-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.976281 4948 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/2314b196-d44a-487b-b9db-e0396b172957-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.976294 4948 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/fec867e7-c935-489e-b5e9-70d0cf71413b-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.976306 4948 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/fec867e7-c935-489e-b5e9-70d0cf71413b-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.976324 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5llx\" (UniqueName: \"kubernetes.io/projected/fec867e7-c935-489e-b5e9-70d0cf71413b-kube-api-access-k5llx\") on node \"crc\" DevicePath \"\"" Jan 
22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.976336 4948 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/2314b196-d44a-487b-b9db-e0396b172957-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.976348 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-27wzl\" (UniqueName: \"kubernetes.io/projected/2314b196-d44a-487b-b9db-e0396b172957-kube-api-access-27wzl\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.976363 4948 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/fec867e7-c935-489e-b5e9-70d0cf71413b-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.976375 4948 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/2314b196-d44a-487b-b9db-e0396b172957-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.977625 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d-client-ca\") pod \"route-controller-manager-577dcd5dc8-ccsvt\" (UID: \"ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d\") " pod="openshift-route-controller-manager/route-controller-manager-577dcd5dc8-ccsvt" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.977723 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d-config\") pod \"route-controller-manager-577dcd5dc8-ccsvt\" (UID: \"ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d\") " pod="openshift-route-controller-manager/route-controller-manager-577dcd5dc8-ccsvt" Jan 22 14:22:08 crc kubenswrapper[4948]: I0122 14:22:08.982067 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d-serving-cert\") pod \"route-controller-manager-577dcd5dc8-ccsvt\" (UID: \"ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d\") " pod="openshift-route-controller-manager/route-controller-manager-577dcd5dc8-ccsvt" Jan 22 14:22:09 crc kubenswrapper[4948]: I0122 14:22:09.000192 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qglcw\" (UniqueName: \"kubernetes.io/projected/ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d-kube-api-access-qglcw\") pod \"route-controller-manager-577dcd5dc8-ccsvt\" (UID: \"ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d\") " pod="openshift-route-controller-manager/route-controller-manager-577dcd5dc8-ccsvt" Jan 22 14:22:09 crc kubenswrapper[4948]: I0122 14:22:09.023572 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm" event={"ID":"fec867e7-c935-489e-b5e9-70d0cf71413b","Type":"ContainerDied","Data":"1f8b9e6d31ab63e0d00ff4afff589ff2580065e193bf8787cec802394c72f2e6"} Jan 22 14:22:09 crc kubenswrapper[4948]: I0122 14:22:09.023586 4948 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm" Jan 22 14:22:09 crc kubenswrapper[4948]: I0122 14:22:09.023783 4948 scope.go:117] "RemoveContainer" containerID="eb43f0243d3cfb1b9f60d4e6368f173cdda599c17c5e0080d4876ef6d92b8d20" Jan 22 14:22:09 crc kubenswrapper[4948]: I0122 14:22:09.027174 4948 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-xf6qs" Jan 22 14:22:09 crc kubenswrapper[4948]: I0122 14:22:09.029043 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-xf6qs" event={"ID":"2314b196-d44a-487b-b9db-e0396b172957","Type":"ContainerDied","Data":"d4ab31731798ac231415ef6ead201ed1f50228fc5e57f0857a7935d4a153c0b5"} Jan 22 14:22:09 crc kubenswrapper[4948]: E0122 14:22:09.029100 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-kppfw" podUID="39643a3e-7da1-4e07-a5f0-1c53df079831" Jan 22 14:22:09 crc kubenswrapper[4948]: E0122 14:22:09.029410 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-z4wls" podUID="62e1b4fe-576f-48ed-a7ea-a94dfd1af206" Jan 22 14:22:09 crc kubenswrapper[4948]: E0122 14:22:09.032555 4948 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-6wr9b" podUID="b89355cf-e0e4-4a32-8519-c75e0a13c7ce" Jan 22 14:22:09 crc kubenswrapper[4948]: I0122 14:22:09.046699 4948 scope.go:117] "RemoveContainer" containerID="2117096d8c96d556464c0ecff73e591cc95bd4773ed7028e4445fe14c0fd5642" Jan 22 14:22:09 crc kubenswrapper[4948]: I0122 14:22:09.069529 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-fxccj"] Jan 22 14:22:09 crc kubenswrapper[4948]: I0122 14:22:09.084355 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-577dcd5dc8-ccsvt" Jan 22 14:22:09 crc kubenswrapper[4948]: I0122 14:22:09.105005 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm"] Jan 22 14:22:09 crc kubenswrapper[4948]: I0122 14:22:09.110726 4948 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-6p6lm"] Jan 22 14:22:09 crc kubenswrapper[4948]: I0122 14:22:09.115311 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xf6qs"] Jan 22 14:22:09 crc kubenswrapper[4948]: I0122 14:22:09.119221 4948 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-xf6qs"] Jan 22 14:22:09 crc kubenswrapper[4948]: I0122 14:22:09.180491 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 22 14:22:09 crc kubenswrapper[4948]: I0122 14:22:09.181289 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 14:22:09 crc kubenswrapper[4948]: I0122 14:22:09.186179 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 22 14:22:09 crc kubenswrapper[4948]: I0122 14:22:09.186703 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 22 14:22:09 crc kubenswrapper[4948]: I0122 14:22:09.190761 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 22 14:22:09 crc kubenswrapper[4948]: I0122 14:22:09.281799 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0cb8dc43-47b9-4f5d-ada1-72db187683ed-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"0cb8dc43-47b9-4f5d-ada1-72db187683ed\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 14:22:09 crc kubenswrapper[4948]: I0122 14:22:09.281847 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0cb8dc43-47b9-4f5d-ada1-72db187683ed-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"0cb8dc43-47b9-4f5d-ada1-72db187683ed\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 14:22:09 crc kubenswrapper[4948]: I0122 14:22:09.285522 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-577dcd5dc8-ccsvt"] Jan 22 14:22:09 crc kubenswrapper[4948]: W0122 14:22:09.293102 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podff9b8e79_f7c7_4b12_912f_aa356fc8ec7d.slice/crio-f61fec06913a797c86e858c2d068445baf1e27bcfcba61e18e07b2059dd48491 WatchSource:0}: Error finding container f61fec06913a797c86e858c2d068445baf1e27bcfcba61e18e07b2059dd48491: Status 404 returned error can't find the container with id f61fec06913a797c86e858c2d068445baf1e27bcfcba61e18e07b2059dd48491 Jan 22 14:22:09 crc kubenswrapper[4948]: I0122 14:22:09.383362 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: 
\"kubernetes.io/host-path/0cb8dc43-47b9-4f5d-ada1-72db187683ed-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"0cb8dc43-47b9-4f5d-ada1-72db187683ed\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 14:22:09 crc kubenswrapper[4948]: I0122 14:22:09.383521 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0cb8dc43-47b9-4f5d-ada1-72db187683ed-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"0cb8dc43-47b9-4f5d-ada1-72db187683ed\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 14:22:09 crc kubenswrapper[4948]: I0122 14:22:09.383835 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0cb8dc43-47b9-4f5d-ada1-72db187683ed-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"0cb8dc43-47b9-4f5d-ada1-72db187683ed\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 14:22:09 crc kubenswrapper[4948]: I0122 14:22:09.407113 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0cb8dc43-47b9-4f5d-ada1-72db187683ed-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"0cb8dc43-47b9-4f5d-ada1-72db187683ed\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 14:22:09 crc kubenswrapper[4948]: I0122 14:22:09.508981 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 14:22:09 crc kubenswrapper[4948]: I0122 14:22:09.728407 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Jan 22 14:22:09 crc kubenswrapper[4948]: I0122 14:22:09.967327 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2314b196-d44a-487b-b9db-e0396b172957" path="/var/lib/kubelet/pods/2314b196-d44a-487b-b9db-e0396b172957/volumes" Jan 22 14:22:09 crc kubenswrapper[4948]: I0122 14:22:09.968354 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fec867e7-c935-489e-b5e9-70d0cf71413b" path="/var/lib/kubelet/pods/fec867e7-c935-489e-b5e9-70d0cf71413b/volumes" Jan 22 14:22:10 crc kubenswrapper[4948]: I0122 14:22:10.036292 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"0cb8dc43-47b9-4f5d-ada1-72db187683ed","Type":"ContainerStarted","Data":"61cde42591e13e6f1798ba2b50cdc12f971f704032d604e7f36edb16851f364d"} Jan 22 14:22:10 crc kubenswrapper[4948]: I0122 14:22:10.036339 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"0cb8dc43-47b9-4f5d-ada1-72db187683ed","Type":"ContainerStarted","Data":"14050951a6fda5661ec09539d52d573959545146dc835883b74e2fb1e8143fc2"} Jan 22 14:22:10 crc kubenswrapper[4948]: I0122 14:22:10.039256 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-577dcd5dc8-ccsvt" event={"ID":"ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d","Type":"ContainerStarted","Data":"4a62dcf3f3c2275c8c2f2a4753dc394b7ebca517fd5756c8dafc6a62398279e2"} Jan 22 14:22:10 crc kubenswrapper[4948]: I0122 14:22:10.039283 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-577dcd5dc8-ccsvt" 
event={"ID":"ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d","Type":"ContainerStarted","Data":"f61fec06913a797c86e858c2d068445baf1e27bcfcba61e18e07b2059dd48491"} Jan 22 14:22:10 crc kubenswrapper[4948]: I0122 14:22:10.040058 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-577dcd5dc8-ccsvt" Jan 22 14:22:10 crc kubenswrapper[4948]: I0122 14:22:10.044148 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-fxccj" event={"ID":"1280bb23-c283-4970-a78b-6de7854469ac","Type":"ContainerStarted","Data":"811bf9912fca4d81c3d61cb3341e63177f18c68e604551a9745ae1590edd985b"} Jan 22 14:22:10 crc kubenswrapper[4948]: I0122 14:22:10.044189 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-fxccj" event={"ID":"1280bb23-c283-4970-a78b-6de7854469ac","Type":"ContainerStarted","Data":"063900772dcdab34b40a6ad58a961277c2ed04c532b3143d4d8860691cbd8c3d"} Jan 22 14:22:10 crc kubenswrapper[4948]: I0122 14:22:10.044204 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-fxccj" event={"ID":"1280bb23-c283-4970-a78b-6de7854469ac","Type":"ContainerStarted","Data":"851d59d46ec0af1415353dcebe3a211c312d04c6fcc6c613c5d3ca9718e281a5"} Jan 22 14:22:10 crc kubenswrapper[4948]: I0122 14:22:10.053502 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=1.053472505 podStartE2EDuration="1.053472505s" podCreationTimestamp="2026-01-22 14:22:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:22:10.050728581 +0000 UTC m=+188.380401540" watchObservedRunningTime="2026-01-22 14:22:10.053472505 +0000 UTC m=+188.383145434" Jan 22 14:22:10 crc kubenswrapper[4948]: I0122 14:22:10.087743 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-577dcd5dc8-ccsvt" podStartSLOduration=8.087720116 podStartE2EDuration="8.087720116s" podCreationTimestamp="2026-01-22 14:22:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:22:10.086512414 +0000 UTC m=+188.416185353" watchObservedRunningTime="2026-01-22 14:22:10.087720116 +0000 UTC m=+188.417393045" Jan 22 14:22:10 crc kubenswrapper[4948]: I0122 14:22:10.103000 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-fxccj" podStartSLOduration=169.10298087 podStartE2EDuration="2m49.10298087s" podCreationTimestamp="2026-01-22 14:19:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:22:10.101961013 +0000 UTC m=+188.431633932" watchObservedRunningTime="2026-01-22 14:22:10.10298087 +0000 UTC m=+188.432653799" Jan 22 14:22:10 crc kubenswrapper[4948]: I0122 14:22:10.177420 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-577dcd5dc8-ccsvt" Jan 22 14:22:10 crc kubenswrapper[4948]: I0122 14:22:10.221348 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Jan 22 14:22:11 crc kubenswrapper[4948]: 
I0122 14:22:11.053869 4948 generic.go:334] "Generic (PLEG): container finished" podID="0cb8dc43-47b9-4f5d-ada1-72db187683ed" containerID="61cde42591e13e6f1798ba2b50cdc12f971f704032d604e7f36edb16851f364d" exitCode=0 Jan 22 14:22:11 crc kubenswrapper[4948]: I0122 14:22:11.054038 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"0cb8dc43-47b9-4f5d-ada1-72db187683ed","Type":"ContainerDied","Data":"61cde42591e13e6f1798ba2b50cdc12f971f704032d604e7f36edb16851f364d"} Jan 22 14:22:11 crc kubenswrapper[4948]: I0122 14:22:11.224796 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-58f78f668-4h2ls"] Jan 22 14:22:11 crc kubenswrapper[4948]: I0122 14:22:11.225901 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-58f78f668-4h2ls" Jan 22 14:22:11 crc kubenswrapper[4948]: I0122 14:22:11.231966 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 22 14:22:11 crc kubenswrapper[4948]: I0122 14:22:11.233377 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 22 14:22:11 crc kubenswrapper[4948]: I0122 14:22:11.235478 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-58f78f668-4h2ls"] Jan 22 14:22:11 crc kubenswrapper[4948]: I0122 14:22:11.235564 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 22 14:22:11 crc kubenswrapper[4948]: I0122 14:22:11.235648 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 22 14:22:11 crc kubenswrapper[4948]: I0122 14:22:11.235715 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 22 14:22:11 crc kubenswrapper[4948]: I0122 14:22:11.238202 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 22 14:22:11 crc kubenswrapper[4948]: I0122 14:22:11.249598 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 22 14:22:11 crc kubenswrapper[4948]: I0122 14:22:11.411714 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tfz42\" (UniqueName: \"kubernetes.io/projected/5d0204c5-32af-4833-8a87-ecfb75837a3f-kube-api-access-tfz42\") pod \"controller-manager-58f78f668-4h2ls\" (UID: \"5d0204c5-32af-4833-8a87-ecfb75837a3f\") " pod="openshift-controller-manager/controller-manager-58f78f668-4h2ls" Jan 22 14:22:11 crc kubenswrapper[4948]: I0122 14:22:11.411775 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5d0204c5-32af-4833-8a87-ecfb75837a3f-proxy-ca-bundles\") pod \"controller-manager-58f78f668-4h2ls\" (UID: \"5d0204c5-32af-4833-8a87-ecfb75837a3f\") " pod="openshift-controller-manager/controller-manager-58f78f668-4h2ls" Jan 22 14:22:11 crc kubenswrapper[4948]: I0122 14:22:11.411827 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/5d0204c5-32af-4833-8a87-ecfb75837a3f-config\") pod \"controller-manager-58f78f668-4h2ls\" (UID: \"5d0204c5-32af-4833-8a87-ecfb75837a3f\") " pod="openshift-controller-manager/controller-manager-58f78f668-4h2ls" Jan 22 14:22:11 crc kubenswrapper[4948]: I0122 14:22:11.411848 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5d0204c5-32af-4833-8a87-ecfb75837a3f-client-ca\") pod \"controller-manager-58f78f668-4h2ls\" (UID: \"5d0204c5-32af-4833-8a87-ecfb75837a3f\") " pod="openshift-controller-manager/controller-manager-58f78f668-4h2ls" Jan 22 14:22:11 crc kubenswrapper[4948]: I0122 14:22:11.411913 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d0204c5-32af-4833-8a87-ecfb75837a3f-serving-cert\") pod \"controller-manager-58f78f668-4h2ls\" (UID: \"5d0204c5-32af-4833-8a87-ecfb75837a3f\") " pod="openshift-controller-manager/controller-manager-58f78f668-4h2ls" Jan 22 14:22:11 crc kubenswrapper[4948]: I0122 14:22:11.414535 4948 patch_prober.go:28] interesting pod/machine-config-daemon-ldzzt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:22:11 crc kubenswrapper[4948]: I0122 14:22:11.414608 4948 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" podUID="355ee4ab-8609-4b0c-ad77-bff335632d2d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:22:11 crc kubenswrapper[4948]: I0122 14:22:11.513325 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d0204c5-32af-4833-8a87-ecfb75837a3f-config\") pod \"controller-manager-58f78f668-4h2ls\" (UID: \"5d0204c5-32af-4833-8a87-ecfb75837a3f\") " pod="openshift-controller-manager/controller-manager-58f78f668-4h2ls" Jan 22 14:22:11 crc kubenswrapper[4948]: I0122 14:22:11.513371 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5d0204c5-32af-4833-8a87-ecfb75837a3f-client-ca\") pod \"controller-manager-58f78f668-4h2ls\" (UID: \"5d0204c5-32af-4833-8a87-ecfb75837a3f\") " pod="openshift-controller-manager/controller-manager-58f78f668-4h2ls" Jan 22 14:22:11 crc kubenswrapper[4948]: I0122 14:22:11.513404 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d0204c5-32af-4833-8a87-ecfb75837a3f-serving-cert\") pod \"controller-manager-58f78f668-4h2ls\" (UID: \"5d0204c5-32af-4833-8a87-ecfb75837a3f\") " pod="openshift-controller-manager/controller-manager-58f78f668-4h2ls" Jan 22 14:22:11 crc kubenswrapper[4948]: I0122 14:22:11.513443 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tfz42\" (UniqueName: \"kubernetes.io/projected/5d0204c5-32af-4833-8a87-ecfb75837a3f-kube-api-access-tfz42\") pod \"controller-manager-58f78f668-4h2ls\" (UID: \"5d0204c5-32af-4833-8a87-ecfb75837a3f\") " pod="openshift-controller-manager/controller-manager-58f78f668-4h2ls" Jan 22 14:22:11 crc kubenswrapper[4948]: I0122 
14:22:11.513471 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5d0204c5-32af-4833-8a87-ecfb75837a3f-proxy-ca-bundles\") pod \"controller-manager-58f78f668-4h2ls\" (UID: \"5d0204c5-32af-4833-8a87-ecfb75837a3f\") " pod="openshift-controller-manager/controller-manager-58f78f668-4h2ls" Jan 22 14:22:11 crc kubenswrapper[4948]: I0122 14:22:11.514999 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5d0204c5-32af-4833-8a87-ecfb75837a3f-proxy-ca-bundles\") pod \"controller-manager-58f78f668-4h2ls\" (UID: \"5d0204c5-32af-4833-8a87-ecfb75837a3f\") " pod="openshift-controller-manager/controller-manager-58f78f668-4h2ls" Jan 22 14:22:11 crc kubenswrapper[4948]: I0122 14:22:11.515236 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5d0204c5-32af-4833-8a87-ecfb75837a3f-client-ca\") pod \"controller-manager-58f78f668-4h2ls\" (UID: \"5d0204c5-32af-4833-8a87-ecfb75837a3f\") " pod="openshift-controller-manager/controller-manager-58f78f668-4h2ls" Jan 22 14:22:11 crc kubenswrapper[4948]: I0122 14:22:11.515310 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d0204c5-32af-4833-8a87-ecfb75837a3f-config\") pod \"controller-manager-58f78f668-4h2ls\" (UID: \"5d0204c5-32af-4833-8a87-ecfb75837a3f\") " pod="openshift-controller-manager/controller-manager-58f78f668-4h2ls" Jan 22 14:22:11 crc kubenswrapper[4948]: I0122 14:22:11.519309 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d0204c5-32af-4833-8a87-ecfb75837a3f-serving-cert\") pod \"controller-manager-58f78f668-4h2ls\" (UID: \"5d0204c5-32af-4833-8a87-ecfb75837a3f\") " pod="openshift-controller-manager/controller-manager-58f78f668-4h2ls" Jan 22 14:22:11 crc kubenswrapper[4948]: I0122 14:22:11.546062 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tfz42\" (UniqueName: \"kubernetes.io/projected/5d0204c5-32af-4833-8a87-ecfb75837a3f-kube-api-access-tfz42\") pod \"controller-manager-58f78f668-4h2ls\" (UID: \"5d0204c5-32af-4833-8a87-ecfb75837a3f\") " pod="openshift-controller-manager/controller-manager-58f78f668-4h2ls" Jan 22 14:22:11 crc kubenswrapper[4948]: I0122 14:22:11.580895 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-58f78f668-4h2ls" Jan 22 14:22:11 crc kubenswrapper[4948]: I0122 14:22:11.823366 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-58f78f668-4h2ls"] Jan 22 14:22:12 crc kubenswrapper[4948]: I0122 14:22:12.061675 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-58f78f668-4h2ls" event={"ID":"5d0204c5-32af-4833-8a87-ecfb75837a3f","Type":"ContainerStarted","Data":"a66d38c94399bcfdb01952e80d76d9b53dac4aa183e37afed8d5efb4beb2b643"} Jan 22 14:22:12 crc kubenswrapper[4948]: I0122 14:22:12.062138 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-58f78f668-4h2ls" event={"ID":"5d0204c5-32af-4833-8a87-ecfb75837a3f","Type":"ContainerStarted","Data":"ce40e9ae69c4f1e978cad7659ed3478c450d6afc89e27f6f215136400c8f6dc8"} Jan 22 14:22:12 crc kubenswrapper[4948]: I0122 14:22:12.062497 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-58f78f668-4h2ls" Jan 22 14:22:12 crc kubenswrapper[4948]: I0122 14:22:12.068573 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-58f78f668-4h2ls" Jan 22 14:22:12 crc kubenswrapper[4948]: I0122 14:22:12.080253 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-58f78f668-4h2ls" podStartSLOduration=10.080227809 podStartE2EDuration="10.080227809s" podCreationTimestamp="2026-01-22 14:22:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:22:12.079758517 +0000 UTC m=+190.409431456" watchObservedRunningTime="2026-01-22 14:22:12.080227809 +0000 UTC m=+190.409900738" Jan 22 14:22:12 crc kubenswrapper[4948]: I0122 14:22:12.317163 4948 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 14:22:12 crc kubenswrapper[4948]: I0122 14:22:12.432499 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0cb8dc43-47b9-4f5d-ada1-72db187683ed-kube-api-access\") pod \"0cb8dc43-47b9-4f5d-ada1-72db187683ed\" (UID: \"0cb8dc43-47b9-4f5d-ada1-72db187683ed\") " Jan 22 14:22:12 crc kubenswrapper[4948]: I0122 14:22:12.433928 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0cb8dc43-47b9-4f5d-ada1-72db187683ed-kubelet-dir\") pod \"0cb8dc43-47b9-4f5d-ada1-72db187683ed\" (UID: \"0cb8dc43-47b9-4f5d-ada1-72db187683ed\") " Jan 22 14:22:12 crc kubenswrapper[4948]: I0122 14:22:12.434102 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0cb8dc43-47b9-4f5d-ada1-72db187683ed-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "0cb8dc43-47b9-4f5d-ada1-72db187683ed" (UID: "0cb8dc43-47b9-4f5d-ada1-72db187683ed"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:22:12 crc kubenswrapper[4948]: I0122 14:22:12.434339 4948 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0cb8dc43-47b9-4f5d-ada1-72db187683ed-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:12 crc kubenswrapper[4948]: I0122 14:22:12.441207 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0cb8dc43-47b9-4f5d-ada1-72db187683ed-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0cb8dc43-47b9-4f5d-ada1-72db187683ed" (UID: "0cb8dc43-47b9-4f5d-ada1-72db187683ed"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:22:12 crc kubenswrapper[4948]: I0122 14:22:12.535911 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0cb8dc43-47b9-4f5d-ada1-72db187683ed-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:13 crc kubenswrapper[4948]: I0122 14:22:13.069818 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"0cb8dc43-47b9-4f5d-ada1-72db187683ed","Type":"ContainerDied","Data":"14050951a6fda5661ec09539d52d573959545146dc835883b74e2fb1e8143fc2"} Jan 22 14:22:13 crc kubenswrapper[4948]: I0122 14:22:13.070465 4948 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="14050951a6fda5661ec09539d52d573959545146dc835883b74e2fb1e8143fc2" Jan 22 14:22:13 crc kubenswrapper[4948]: I0122 14:22:13.070650 4948 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Jan 22 14:22:15 crc kubenswrapper[4948]: I0122 14:22:15.979812 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 22 14:22:15 crc kubenswrapper[4948]: E0122 14:22:15.980577 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0cb8dc43-47b9-4f5d-ada1-72db187683ed" containerName="pruner" Jan 22 14:22:15 crc kubenswrapper[4948]: I0122 14:22:15.980595 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="0cb8dc43-47b9-4f5d-ada1-72db187683ed" containerName="pruner" Jan 22 14:22:15 crc kubenswrapper[4948]: I0122 14:22:15.980759 4948 memory_manager.go:354] "RemoveStaleState removing state" podUID="0cb8dc43-47b9-4f5d-ada1-72db187683ed" containerName="pruner" Jan 22 14:22:15 crc kubenswrapper[4948]: I0122 14:22:15.981276 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 22 14:22:15 crc kubenswrapper[4948]: I0122 14:22:15.985121 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Jan 22 14:22:15 crc kubenswrapper[4948]: I0122 14:22:15.985208 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Jan 22 14:22:15 crc kubenswrapper[4948]: I0122 14:22:15.999251 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 22 14:22:16 crc kubenswrapper[4948]: I0122 14:22:16.087999 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/4a6ff136-0872-4b4b-9104-d7c66e20d6f3-var-lock\") pod \"installer-9-crc\" (UID: \"4a6ff136-0872-4b4b-9104-d7c66e20d6f3\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 14:22:16 crc kubenswrapper[4948]: I0122 14:22:16.088115 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4a6ff136-0872-4b4b-9104-d7c66e20d6f3-kubelet-dir\") pod \"installer-9-crc\" (UID: \"4a6ff136-0872-4b4b-9104-d7c66e20d6f3\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 14:22:16 crc kubenswrapper[4948]: I0122 14:22:16.088141 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4a6ff136-0872-4b4b-9104-d7c66e20d6f3-kube-api-access\") pod \"installer-9-crc\" (UID: \"4a6ff136-0872-4b4b-9104-d7c66e20d6f3\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 14:22:16 crc kubenswrapper[4948]: I0122 14:22:16.189472 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4a6ff136-0872-4b4b-9104-d7c66e20d6f3-kubelet-dir\") pod \"installer-9-crc\" (UID: \"4a6ff136-0872-4b4b-9104-d7c66e20d6f3\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 14:22:16 crc kubenswrapper[4948]: I0122 14:22:16.189544 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4a6ff136-0872-4b4b-9104-d7c66e20d6f3-kube-api-access\") pod \"installer-9-crc\" (UID: \"4a6ff136-0872-4b4b-9104-d7c66e20d6f3\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 14:22:16 crc kubenswrapper[4948]: I0122 14:22:16.189621 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/4a6ff136-0872-4b4b-9104-d7c66e20d6f3-var-lock\") pod \"installer-9-crc\" (UID: \"4a6ff136-0872-4b4b-9104-d7c66e20d6f3\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 14:22:16 crc kubenswrapper[4948]: I0122 14:22:16.189684 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4a6ff136-0872-4b4b-9104-d7c66e20d6f3-kubelet-dir\") pod \"installer-9-crc\" (UID: \"4a6ff136-0872-4b4b-9104-d7c66e20d6f3\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 14:22:16 crc kubenswrapper[4948]: I0122 14:22:16.189708 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/4a6ff136-0872-4b4b-9104-d7c66e20d6f3-var-lock\") pod \"installer-9-crc\" (UID: 
\"4a6ff136-0872-4b4b-9104-d7c66e20d6f3\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 14:22:16 crc kubenswrapper[4948]: I0122 14:22:16.212314 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4a6ff136-0872-4b4b-9104-d7c66e20d6f3-kube-api-access\") pod \"installer-9-crc\" (UID: \"4a6ff136-0872-4b4b-9104-d7c66e20d6f3\") " pod="openshift-kube-apiserver/installer-9-crc" Jan 22 14:22:16 crc kubenswrapper[4948]: I0122 14:22:16.297817 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 22 14:22:16 crc kubenswrapper[4948]: I0122 14:22:16.720685 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Jan 22 14:22:16 crc kubenswrapper[4948]: W0122 14:22:16.738254 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod4a6ff136_0872_4b4b_9104_d7c66e20d6f3.slice/crio-6d6521437881ad98c886f006840dadf8f1018f758cefbb5a8e6cf72ed700f08f WatchSource:0}: Error finding container 6d6521437881ad98c886f006840dadf8f1018f758cefbb5a8e6cf72ed700f08f: Status 404 returned error can't find the container with id 6d6521437881ad98c886f006840dadf8f1018f758cefbb5a8e6cf72ed700f08f Jan 22 14:22:17 crc kubenswrapper[4948]: I0122 14:22:17.098475 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"4a6ff136-0872-4b4b-9104-d7c66e20d6f3","Type":"ContainerStarted","Data":"6d6521437881ad98c886f006840dadf8f1018f758cefbb5a8e6cf72ed700f08f"} Jan 22 14:22:18 crc kubenswrapper[4948]: I0122 14:22:18.106686 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vqmnb" event={"ID":"57035aac-3298-49ff-990c-06d1c53f1e42","Type":"ContainerStarted","Data":"c5fd6959ace12fee8e0c2b531486ec6b8900be7d1a26fe245eabaaa25802412e"} Jan 22 14:22:18 crc kubenswrapper[4948]: I0122 14:22:18.108121 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"4a6ff136-0872-4b4b-9104-d7c66e20d6f3","Type":"ContainerStarted","Data":"52c4281899dbbf57a17ff12142db6d062f7e1def8d22791022c5d7c24e4bf4ff"} Jan 22 14:22:18 crc kubenswrapper[4948]: I0122 14:22:18.111411 4948 generic.go:334] "Generic (PLEG): container finished" podID="57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce" containerID="bb3a42ddb85b5bebc89dbeaa494dae19b627dbc22f3dd0c8712a7c585cd0fb6d" exitCode=0 Jan 22 14:22:18 crc kubenswrapper[4948]: I0122 14:22:18.111472 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mkngx" event={"ID":"57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce","Type":"ContainerDied","Data":"bb3a42ddb85b5bebc89dbeaa494dae19b627dbc22f3dd0c8712a7c585cd0fb6d"} Jan 22 14:22:18 crc kubenswrapper[4948]: I0122 14:22:18.156410 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=3.156388531 podStartE2EDuration="3.156388531s" podCreationTimestamp="2026-01-22 14:22:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:22:18.154827908 +0000 UTC m=+196.484500847" watchObservedRunningTime="2026-01-22 14:22:18.156388531 +0000 UTC m=+196.486061460" Jan 22 14:22:19 crc kubenswrapper[4948]: I0122 14:22:19.117805 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-mkngx" event={"ID":"57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce","Type":"ContainerStarted","Data":"ae695c64de56a229e7412b5382a7c4714f6ec3648c91233d7544992039fbb198"} Jan 22 14:22:19 crc kubenswrapper[4948]: I0122 14:22:19.120082 4948 generic.go:334] "Generic (PLEG): container finished" podID="57035aac-3298-49ff-990c-06d1c53f1e42" containerID="c5fd6959ace12fee8e0c2b531486ec6b8900be7d1a26fe245eabaaa25802412e" exitCode=0 Jan 22 14:22:19 crc kubenswrapper[4948]: I0122 14:22:19.120163 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vqmnb" event={"ID":"57035aac-3298-49ff-990c-06d1c53f1e42","Type":"ContainerDied","Data":"c5fd6959ace12fee8e0c2b531486ec6b8900be7d1a26fe245eabaaa25802412e"} Jan 22 14:22:19 crc kubenswrapper[4948]: I0122 14:22:19.139235 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-mkngx" podStartSLOduration=2.910645612 podStartE2EDuration="53.139214903s" podCreationTimestamp="2026-01-22 14:21:26 +0000 UTC" firstStartedPulling="2026-01-22 14:21:28.316505984 +0000 UTC m=+146.646178903" lastFinishedPulling="2026-01-22 14:22:18.545075265 +0000 UTC m=+196.874748194" observedRunningTime="2026-01-22 14:22:19.134638199 +0000 UTC m=+197.464311148" watchObservedRunningTime="2026-01-22 14:22:19.139214903 +0000 UTC m=+197.468887842" Jan 22 14:22:20 crc kubenswrapper[4948]: I0122 14:22:20.128546 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vqmnb" event={"ID":"57035aac-3298-49ff-990c-06d1c53f1e42","Type":"ContainerStarted","Data":"0461e371deffc16af4711b28b0b3dac586f45aef58c3ca2821b7cf41f6322216"} Jan 22 14:22:20 crc kubenswrapper[4948]: I0122 14:22:20.148305 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-vqmnb" podStartSLOduration=2.827751478 podStartE2EDuration="53.148278068s" podCreationTimestamp="2026-01-22 14:21:27 +0000 UTC" firstStartedPulling="2026-01-22 14:21:29.390196395 +0000 UTC m=+147.719869324" lastFinishedPulling="2026-01-22 14:22:19.710722995 +0000 UTC m=+198.040395914" observedRunningTime="2026-01-22 14:22:20.14651203 +0000 UTC m=+198.476184969" watchObservedRunningTime="2026-01-22 14:22:20.148278068 +0000 UTC m=+198.477950987" Jan 22 14:22:21 crc kubenswrapper[4948]: I0122 14:22:21.140264 4948 generic.go:334] "Generic (PLEG): container finished" podID="8cc0468b-0cc6-43b6-b358-72c5cb9d6050" containerID="0c3c693bce12adb21187481331a776acbbf613eeca91bb097c2690cfbbb5033d" exitCode=0 Jan 22 14:22:21 crc kubenswrapper[4948]: I0122 14:22:21.140374 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h27pl" event={"ID":"8cc0468b-0cc6-43b6-b358-72c5cb9d6050","Type":"ContainerDied","Data":"0c3c693bce12adb21187481331a776acbbf613eeca91bb097c2690cfbbb5033d"} Jan 22 14:22:21 crc kubenswrapper[4948]: I0122 14:22:21.144399 4948 generic.go:334] "Generic (PLEG): container finished" podID="a15930ec-26ea-4d6f-b1fc-2aa252298992" containerID="787f7634f32d01088e3a358a675e62af08bab0c3f31343c95eec1d5770475c6c" exitCode=0 Jan 22 14:22:21 crc kubenswrapper[4948]: I0122 14:22:21.144443 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p5mjq" event={"ID":"a15930ec-26ea-4d6f-b1fc-2aa252298992","Type":"ContainerDied","Data":"787f7634f32d01088e3a358a675e62af08bab0c3f31343c95eec1d5770475c6c"} Jan 22 14:22:22 crc 
kubenswrapper[4948]: I0122 14:22:22.154336 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p5mjq" event={"ID":"a15930ec-26ea-4d6f-b1fc-2aa252298992","Type":"ContainerStarted","Data":"d32dd8e67a4e5cac1bad2b90f7c2b2a2df6f42d6203e8ec832afc32a75713293"} Jan 22 14:22:22 crc kubenswrapper[4948]: I0122 14:22:22.158490 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h27pl" event={"ID":"8cc0468b-0cc6-43b6-b358-72c5cb9d6050","Type":"ContainerStarted","Data":"4151096fcad5e7293106c1c8e62bc1a3337f085b32167ef8a78c1cede552d05a"} Jan 22 14:22:22 crc kubenswrapper[4948]: I0122 14:22:22.160741 4948 generic.go:334] "Generic (PLEG): container finished" podID="5a4d2a63-38c9-4cd6-b02a-06c27f128cd0" containerID="b79c4c135fe56c89b4ce3705fe5346136d37b6c8b35064e284348e7b50571219" exitCode=0 Jan 22 14:22:22 crc kubenswrapper[4948]: I0122 14:22:22.160807 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wstpq" event={"ID":"5a4d2a63-38c9-4cd6-b02a-06c27f128cd0","Type":"ContainerDied","Data":"b79c4c135fe56c89b4ce3705fe5346136d37b6c8b35064e284348e7b50571219"} Jan 22 14:22:22 crc kubenswrapper[4948]: I0122 14:22:22.178084 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-p5mjq" podStartSLOduration=2.974106811 podStartE2EDuration="55.178059354s" podCreationTimestamp="2026-01-22 14:21:27 +0000 UTC" firstStartedPulling="2026-01-22 14:21:29.432474974 +0000 UTC m=+147.762147903" lastFinishedPulling="2026-01-22 14:22:21.636427517 +0000 UTC m=+199.966100446" observedRunningTime="2026-01-22 14:22:22.174626961 +0000 UTC m=+200.504299890" watchObservedRunningTime="2026-01-22 14:22:22.178059354 +0000 UTC m=+200.507732283" Jan 22 14:22:22 crc kubenswrapper[4948]: I0122 14:22:22.219215 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-h27pl" podStartSLOduration=4.104780762 podStartE2EDuration="56.219194285s" podCreationTimestamp="2026-01-22 14:21:26 +0000 UTC" firstStartedPulling="2026-01-22 14:21:29.423359766 +0000 UTC m=+147.753032695" lastFinishedPulling="2026-01-22 14:22:21.537773289 +0000 UTC m=+199.867446218" observedRunningTime="2026-01-22 14:22:22.21348917 +0000 UTC m=+200.543162099" watchObservedRunningTime="2026-01-22 14:22:22.219194285 +0000 UTC m=+200.548867204" Jan 22 14:22:22 crc kubenswrapper[4948]: I0122 14:22:22.804045 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-58f78f668-4h2ls"] Jan 22 14:22:22 crc kubenswrapper[4948]: I0122 14:22:22.804310 4948 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-58f78f668-4h2ls" podUID="5d0204c5-32af-4833-8a87-ecfb75837a3f" containerName="controller-manager" containerID="cri-o://a66d38c94399bcfdb01952e80d76d9b53dac4aa183e37afed8d5efb4beb2b643" gracePeriod=30 Jan 22 14:22:22 crc kubenswrapper[4948]: I0122 14:22:22.841135 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-577dcd5dc8-ccsvt"] Jan 22 14:22:22 crc kubenswrapper[4948]: I0122 14:22:22.841403 4948 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-577dcd5dc8-ccsvt" podUID="ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d" 
containerName="route-controller-manager" containerID="cri-o://4a62dcf3f3c2275c8c2f2a4753dc394b7ebca517fd5756c8dafc6a62398279e2" gracePeriod=30 Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.170179 4948 generic.go:334] "Generic (PLEG): container finished" podID="5d0204c5-32af-4833-8a87-ecfb75837a3f" containerID="a66d38c94399bcfdb01952e80d76d9b53dac4aa183e37afed8d5efb4beb2b643" exitCode=0 Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.170256 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-58f78f668-4h2ls" event={"ID":"5d0204c5-32af-4833-8a87-ecfb75837a3f","Type":"ContainerDied","Data":"a66d38c94399bcfdb01952e80d76d9b53dac4aa183e37afed8d5efb4beb2b643"} Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.173328 4948 generic.go:334] "Generic (PLEG): container finished" podID="ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d" containerID="4a62dcf3f3c2275c8c2f2a4753dc394b7ebca517fd5756c8dafc6a62398279e2" exitCode=0 Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.173393 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-577dcd5dc8-ccsvt" event={"ID":"ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d","Type":"ContainerDied","Data":"4a62dcf3f3c2275c8c2f2a4753dc394b7ebca517fd5756c8dafc6a62398279e2"} Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.177257 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wstpq" event={"ID":"5a4d2a63-38c9-4cd6-b02a-06c27f128cd0","Type":"ContainerStarted","Data":"01197ea87d7a6fbb6aac2a434bc42b1855f248637c0baf51a58f0872b0de8c99"} Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.478106 4948 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-58f78f668-4h2ls" Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.481917 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5d0204c5-32af-4833-8a87-ecfb75837a3f-proxy-ca-bundles\") pod \"5d0204c5-32af-4833-8a87-ecfb75837a3f\" (UID: \"5d0204c5-32af-4833-8a87-ecfb75837a3f\") " Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.481965 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d0204c5-32af-4833-8a87-ecfb75837a3f-config\") pod \"5d0204c5-32af-4833-8a87-ecfb75837a3f\" (UID: \"5d0204c5-32af-4833-8a87-ecfb75837a3f\") " Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.481984 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5d0204c5-32af-4833-8a87-ecfb75837a3f-client-ca\") pod \"5d0204c5-32af-4833-8a87-ecfb75837a3f\" (UID: \"5d0204c5-32af-4833-8a87-ecfb75837a3f\") " Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.482051 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tfz42\" (UniqueName: \"kubernetes.io/projected/5d0204c5-32af-4833-8a87-ecfb75837a3f-kube-api-access-tfz42\") pod \"5d0204c5-32af-4833-8a87-ecfb75837a3f\" (UID: \"5d0204c5-32af-4833-8a87-ecfb75837a3f\") " Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.482075 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d0204c5-32af-4833-8a87-ecfb75837a3f-serving-cert\") pod \"5d0204c5-32af-4833-8a87-ecfb75837a3f\" (UID: \"5d0204c5-32af-4833-8a87-ecfb75837a3f\") " Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.482940 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d0204c5-32af-4833-8a87-ecfb75837a3f-client-ca" (OuterVolumeSpecName: "client-ca") pod "5d0204c5-32af-4833-8a87-ecfb75837a3f" (UID: "5d0204c5-32af-4833-8a87-ecfb75837a3f"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.482959 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d0204c5-32af-4833-8a87-ecfb75837a3f-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "5d0204c5-32af-4833-8a87-ecfb75837a3f" (UID: "5d0204c5-32af-4833-8a87-ecfb75837a3f"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.483455 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5d0204c5-32af-4833-8a87-ecfb75837a3f-config" (OuterVolumeSpecName: "config") pod "5d0204c5-32af-4833-8a87-ecfb75837a3f" (UID: "5d0204c5-32af-4833-8a87-ecfb75837a3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.498366 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5d0204c5-32af-4833-8a87-ecfb75837a3f-kube-api-access-tfz42" (OuterVolumeSpecName: "kube-api-access-tfz42") pod "5d0204c5-32af-4833-8a87-ecfb75837a3f" (UID: "5d0204c5-32af-4833-8a87-ecfb75837a3f"). InnerVolumeSpecName "kube-api-access-tfz42". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.501939 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5d0204c5-32af-4833-8a87-ecfb75837a3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5d0204c5-32af-4833-8a87-ecfb75837a3f" (UID: "5d0204c5-32af-4833-8a87-ecfb75837a3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.502792 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wstpq" podStartSLOduration=3.3767251160000002 podStartE2EDuration="55.502764918s" podCreationTimestamp="2026-01-22 14:21:28 +0000 UTC" firstStartedPulling="2026-01-22 14:21:30.482241145 +0000 UTC m=+148.811914074" lastFinishedPulling="2026-01-22 14:22:22.608280947 +0000 UTC m=+200.937953876" observedRunningTime="2026-01-22 14:22:23.206315151 +0000 UTC m=+201.535988070" watchObservedRunningTime="2026-01-22 14:22:23.502764918 +0000 UTC m=+201.832437847" Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.548658 4948 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-577dcd5dc8-ccsvt" Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.585280 4948 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5d0204c5-32af-4833-8a87-ecfb75837a3f-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.585324 4948 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5d0204c5-32af-4833-8a87-ecfb75837a3f-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.585338 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tfz42\" (UniqueName: \"kubernetes.io/projected/5d0204c5-32af-4833-8a87-ecfb75837a3f-kube-api-access-tfz42\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.585361 4948 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5d0204c5-32af-4833-8a87-ecfb75837a3f-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.585372 4948 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/5d0204c5-32af-4833-8a87-ecfb75837a3f-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.685804 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d-config\") pod \"ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d\" (UID: \"ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d\") " Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.686728 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qglcw\" (UniqueName: \"kubernetes.io/projected/ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d-kube-api-access-qglcw\") pod \"ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d\" (UID: \"ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d\") " Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.686788 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" 
(UniqueName: \"kubernetes.io/configmap/ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d-client-ca\") pod \"ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d\" (UID: \"ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d\") " Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.686849 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d-serving-cert\") pod \"ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d\" (UID: \"ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d\") " Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.686655 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d-config" (OuterVolumeSpecName: "config") pod "ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d" (UID: "ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.687968 4948 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.689318 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d-client-ca" (OuterVolumeSpecName: "client-ca") pod "ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d" (UID: "ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.690886 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d" (UID: "ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.691768 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d-kube-api-access-qglcw" (OuterVolumeSpecName: "kube-api-access-qglcw") pod "ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d" (UID: "ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d"). InnerVolumeSpecName "kube-api-access-qglcw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.789275 4948 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.789325 4948 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:23 crc kubenswrapper[4948]: I0122 14:22:23.789339 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qglcw\" (UniqueName: \"kubernetes.io/projected/ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d-kube-api-access-qglcw\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.183631 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-58f78f668-4h2ls" event={"ID":"5d0204c5-32af-4833-8a87-ecfb75837a3f","Type":"ContainerDied","Data":"ce40e9ae69c4f1e978cad7659ed3478c450d6afc89e27f6f215136400c8f6dc8"} Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.183698 4948 scope.go:117] "RemoveContainer" containerID="a66d38c94399bcfdb01952e80d76d9b53dac4aa183e37afed8d5efb4beb2b643" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.183723 4948 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-58f78f668-4h2ls" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.185186 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-577dcd5dc8-ccsvt" event={"ID":"ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d","Type":"ContainerDied","Data":"f61fec06913a797c86e858c2d068445baf1e27bcfcba61e18e07b2059dd48491"} Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.185264 4948 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-577dcd5dc8-ccsvt" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.202237 4948 scope.go:117] "RemoveContainer" containerID="4a62dcf3f3c2275c8c2f2a4753dc394b7ebca517fd5756c8dafc6a62398279e2" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.203344 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-58f78f668-4h2ls"] Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.214679 4948 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-58f78f668-4h2ls"] Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.226915 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-577dcd5dc8-ccsvt"] Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.231496 4948 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-577dcd5dc8-ccsvt"] Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.235850 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k"] Jan 22 14:22:24 crc kubenswrapper[4948]: E0122 14:22:24.236176 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5d0204c5-32af-4833-8a87-ecfb75837a3f" containerName="controller-manager" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.236195 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="5d0204c5-32af-4833-8a87-ecfb75837a3f" containerName="controller-manager" Jan 22 14:22:24 crc kubenswrapper[4948]: E0122 14:22:24.236227 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d" containerName="route-controller-manager" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.236234 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d" containerName="route-controller-manager" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.236351 4948 memory_manager.go:354] "RemoveStaleState removing state" podUID="ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d" containerName="route-controller-manager" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.236373 4948 memory_manager.go:354] "RemoveStaleState removing state" podUID="5d0204c5-32af-4833-8a87-ecfb75837a3f" containerName="controller-manager" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.236848 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.239032 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.239222 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.239286 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.239591 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.239830 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.240835 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.244635 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f4676974b-5tz95"] Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.246837 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.249062 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f4676974b-5tz95"] Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.249168 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f4676974b-5tz95" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.251674 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.251791 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.251796 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.251981 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.252260 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.252921 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k"] Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.254525 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.399184 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/399eaa47-b30b-4736-90e1-5f888a9a8e66-config\") pod \"controller-manager-5c66c6d6cd-swp8k\" (UID: \"399eaa47-b30b-4736-90e1-5f888a9a8e66\") " pod="openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.399268 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/399eaa47-b30b-4736-90e1-5f888a9a8e66-client-ca\") pod \"controller-manager-5c66c6d6cd-swp8k\" (UID: \"399eaa47-b30b-4736-90e1-5f888a9a8e66\") " pod="openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.399300 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8f8f939e-3201-4147-9768-6e087b892afd-client-ca\") pod \"route-controller-manager-7f4676974b-5tz95\" (UID: \"8f8f939e-3201-4147-9768-6e087b892afd\") " pod="openshift-route-controller-manager/route-controller-manager-7f4676974b-5tz95" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.399331 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f8f939e-3201-4147-9768-6e087b892afd-config\") pod \"route-controller-manager-7f4676974b-5tz95\" (UID: \"8f8f939e-3201-4147-9768-6e087b892afd\") " pod="openshift-route-controller-manager/route-controller-manager-7f4676974b-5tz95" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.399486 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f8f939e-3201-4147-9768-6e087b892afd-serving-cert\") pod \"route-controller-manager-7f4676974b-5tz95\" (UID: \"8f8f939e-3201-4147-9768-6e087b892afd\") 
" pod="openshift-route-controller-manager/route-controller-manager-7f4676974b-5tz95" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.399579 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/399eaa47-b30b-4736-90e1-5f888a9a8e66-serving-cert\") pod \"controller-manager-5c66c6d6cd-swp8k\" (UID: \"399eaa47-b30b-4736-90e1-5f888a9a8e66\") " pod="openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.399612 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/399eaa47-b30b-4736-90e1-5f888a9a8e66-proxy-ca-bundles\") pod \"controller-manager-5c66c6d6cd-swp8k\" (UID: \"399eaa47-b30b-4736-90e1-5f888a9a8e66\") " pod="openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.399664 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2p6v\" (UniqueName: \"kubernetes.io/projected/8f8f939e-3201-4147-9768-6e087b892afd-kube-api-access-q2p6v\") pod \"route-controller-manager-7f4676974b-5tz95\" (UID: \"8f8f939e-3201-4147-9768-6e087b892afd\") " pod="openshift-route-controller-manager/route-controller-manager-7f4676974b-5tz95" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.399705 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d28fw\" (UniqueName: \"kubernetes.io/projected/399eaa47-b30b-4736-90e1-5f888a9a8e66-kube-api-access-d28fw\") pod \"controller-manager-5c66c6d6cd-swp8k\" (UID: \"399eaa47-b30b-4736-90e1-5f888a9a8e66\") " pod="openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.501146 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d28fw\" (UniqueName: \"kubernetes.io/projected/399eaa47-b30b-4736-90e1-5f888a9a8e66-kube-api-access-d28fw\") pod \"controller-manager-5c66c6d6cd-swp8k\" (UID: \"399eaa47-b30b-4736-90e1-5f888a9a8e66\") " pod="openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.501212 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/399eaa47-b30b-4736-90e1-5f888a9a8e66-config\") pod \"controller-manager-5c66c6d6cd-swp8k\" (UID: \"399eaa47-b30b-4736-90e1-5f888a9a8e66\") " pod="openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.501276 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/399eaa47-b30b-4736-90e1-5f888a9a8e66-client-ca\") pod \"controller-manager-5c66c6d6cd-swp8k\" (UID: \"399eaa47-b30b-4736-90e1-5f888a9a8e66\") " pod="openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.501305 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8f8f939e-3201-4147-9768-6e087b892afd-client-ca\") pod \"route-controller-manager-7f4676974b-5tz95\" (UID: \"8f8f939e-3201-4147-9768-6e087b892afd\") " 
pod="openshift-route-controller-manager/route-controller-manager-7f4676974b-5tz95" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.501329 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f8f939e-3201-4147-9768-6e087b892afd-config\") pod \"route-controller-manager-7f4676974b-5tz95\" (UID: \"8f8f939e-3201-4147-9768-6e087b892afd\") " pod="openshift-route-controller-manager/route-controller-manager-7f4676974b-5tz95" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.501351 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f8f939e-3201-4147-9768-6e087b892afd-serving-cert\") pod \"route-controller-manager-7f4676974b-5tz95\" (UID: \"8f8f939e-3201-4147-9768-6e087b892afd\") " pod="openshift-route-controller-manager/route-controller-manager-7f4676974b-5tz95" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.501373 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/399eaa47-b30b-4736-90e1-5f888a9a8e66-serving-cert\") pod \"controller-manager-5c66c6d6cd-swp8k\" (UID: \"399eaa47-b30b-4736-90e1-5f888a9a8e66\") " pod="openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.501389 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/399eaa47-b30b-4736-90e1-5f888a9a8e66-proxy-ca-bundles\") pod \"controller-manager-5c66c6d6cd-swp8k\" (UID: \"399eaa47-b30b-4736-90e1-5f888a9a8e66\") " pod="openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.501417 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2p6v\" (UniqueName: \"kubernetes.io/projected/8f8f939e-3201-4147-9768-6e087b892afd-kube-api-access-q2p6v\") pod \"route-controller-manager-7f4676974b-5tz95\" (UID: \"8f8f939e-3201-4147-9768-6e087b892afd\") " pod="openshift-route-controller-manager/route-controller-manager-7f4676974b-5tz95" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.502419 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/399eaa47-b30b-4736-90e1-5f888a9a8e66-client-ca\") pod \"controller-manager-5c66c6d6cd-swp8k\" (UID: \"399eaa47-b30b-4736-90e1-5f888a9a8e66\") " pod="openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.502639 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/399eaa47-b30b-4736-90e1-5f888a9a8e66-config\") pod \"controller-manager-5c66c6d6cd-swp8k\" (UID: \"399eaa47-b30b-4736-90e1-5f888a9a8e66\") " pod="openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.503145 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8f8f939e-3201-4147-9768-6e087b892afd-client-ca\") pod \"route-controller-manager-7f4676974b-5tz95\" (UID: \"8f8f939e-3201-4147-9768-6e087b892afd\") " pod="openshift-route-controller-manager/route-controller-manager-7f4676974b-5tz95" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.503611 4948 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f8f939e-3201-4147-9768-6e087b892afd-config\") pod \"route-controller-manager-7f4676974b-5tz95\" (UID: \"8f8f939e-3201-4147-9768-6e087b892afd\") " pod="openshift-route-controller-manager/route-controller-manager-7f4676974b-5tz95" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.503703 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/399eaa47-b30b-4736-90e1-5f888a9a8e66-proxy-ca-bundles\") pod \"controller-manager-5c66c6d6cd-swp8k\" (UID: \"399eaa47-b30b-4736-90e1-5f888a9a8e66\") " pod="openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.506686 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/399eaa47-b30b-4736-90e1-5f888a9a8e66-serving-cert\") pod \"controller-manager-5c66c6d6cd-swp8k\" (UID: \"399eaa47-b30b-4736-90e1-5f888a9a8e66\") " pod="openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.506691 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f8f939e-3201-4147-9768-6e087b892afd-serving-cert\") pod \"route-controller-manager-7f4676974b-5tz95\" (UID: \"8f8f939e-3201-4147-9768-6e087b892afd\") " pod="openshift-route-controller-manager/route-controller-manager-7f4676974b-5tz95" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.517287 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d28fw\" (UniqueName: \"kubernetes.io/projected/399eaa47-b30b-4736-90e1-5f888a9a8e66-kube-api-access-d28fw\") pod \"controller-manager-5c66c6d6cd-swp8k\" (UID: \"399eaa47-b30b-4736-90e1-5f888a9a8e66\") " pod="openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.518132 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2p6v\" (UniqueName: \"kubernetes.io/projected/8f8f939e-3201-4147-9768-6e087b892afd-kube-api-access-q2p6v\") pod \"route-controller-manager-7f4676974b-5tz95\" (UID: \"8f8f939e-3201-4147-9768-6e087b892afd\") " pod="openshift-route-controller-manager/route-controller-manager-7f4676974b-5tz95" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.555164 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.564877 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f4676974b-5tz95" Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.823878 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f4676974b-5tz95"] Jan 22 14:22:24 crc kubenswrapper[4948]: W0122 14:22:24.832343 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8f8f939e_3201_4147_9768_6e087b892afd.slice/crio-20cf9a85c7c2d0ad47282034cef14605f96b6d2abf2be66383ba8c482be5d854 WatchSource:0}: Error finding container 20cf9a85c7c2d0ad47282034cef14605f96b6d2abf2be66383ba8c482be5d854: Status 404 returned error can't find the container with id 20cf9a85c7c2d0ad47282034cef14605f96b6d2abf2be66383ba8c482be5d854 Jan 22 14:22:24 crc kubenswrapper[4948]: I0122 14:22:24.951457 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k"] Jan 22 14:22:24 crc kubenswrapper[4948]: W0122 14:22:24.955341 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod399eaa47_b30b_4736_90e1_5f888a9a8e66.slice/crio-c0324a23be9aeb4fe8820234021db29f4c005961766854944cd4effe9a3aaac5 WatchSource:0}: Error finding container c0324a23be9aeb4fe8820234021db29f4c005961766854944cd4effe9a3aaac5: Status 404 returned error can't find the container with id c0324a23be9aeb4fe8820234021db29f4c005961766854944cd4effe9a3aaac5 Jan 22 14:22:25 crc kubenswrapper[4948]: I0122 14:22:25.194493 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k" event={"ID":"399eaa47-b30b-4736-90e1-5f888a9a8e66","Type":"ContainerStarted","Data":"c0324a23be9aeb4fe8820234021db29f4c005961766854944cd4effe9a3aaac5"} Jan 22 14:22:25 crc kubenswrapper[4948]: I0122 14:22:25.196185 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7f4676974b-5tz95" event={"ID":"8f8f939e-3201-4147-9768-6e087b892afd","Type":"ContainerStarted","Data":"20cf9a85c7c2d0ad47282034cef14605f96b6d2abf2be66383ba8c482be5d854"} Jan 22 14:22:25 crc kubenswrapper[4948]: I0122 14:22:25.964725 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5d0204c5-32af-4833-8a87-ecfb75837a3f" path="/var/lib/kubelet/pods/5d0204c5-32af-4833-8a87-ecfb75837a3f/volumes" Jan 22 14:22:25 crc kubenswrapper[4948]: I0122 14:22:25.965725 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d" path="/var/lib/kubelet/pods/ff9b8e79-f7c7-4b12-912f-aa356fc8ec7d/volumes" Jan 22 14:22:26 crc kubenswrapper[4948]: I0122 14:22:26.202521 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k" event={"ID":"399eaa47-b30b-4736-90e1-5f888a9a8e66","Type":"ContainerStarted","Data":"d797555b64b714a8d6bd32de4911fdc1b018498670356fdb232795bb0e3e0fd6"} Jan 22 14:22:26 crc kubenswrapper[4948]: I0122 14:22:26.202784 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k" Jan 22 14:22:26 crc kubenswrapper[4948]: I0122 14:22:26.203895 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7f4676974b-5tz95" 
event={"ID":"8f8f939e-3201-4147-9768-6e087b892afd","Type":"ContainerStarted","Data":"788a1b63babb2e046df8adf30169aeca27e02765edc23c7f1ca7ddcdaf5d2c49"} Jan 22 14:22:26 crc kubenswrapper[4948]: I0122 14:22:26.204180 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7f4676974b-5tz95" Jan 22 14:22:26 crc kubenswrapper[4948]: I0122 14:22:26.215366 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k" Jan 22 14:22:26 crc kubenswrapper[4948]: I0122 14:22:26.231948 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k" podStartSLOduration=4.231927347 podStartE2EDuration="4.231927347s" podCreationTimestamp="2026-01-22 14:22:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:22:26.230733904 +0000 UTC m=+204.560406833" watchObservedRunningTime="2026-01-22 14:22:26.231927347 +0000 UTC m=+204.561600296" Jan 22 14:22:26 crc kubenswrapper[4948]: I0122 14:22:26.250743 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7f4676974b-5tz95" Jan 22 14:22:26 crc kubenswrapper[4948]: I0122 14:22:26.261575 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7f4676974b-5tz95" podStartSLOduration=4.261555344 podStartE2EDuration="4.261555344s" podCreationTimestamp="2026-01-22 14:22:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:22:26.255716155 +0000 UTC m=+204.585389084" watchObservedRunningTime="2026-01-22 14:22:26.261555344 +0000 UTC m=+204.591228273" Jan 22 14:22:26 crc kubenswrapper[4948]: I0122 14:22:26.980890 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-h27pl" Jan 22 14:22:26 crc kubenswrapper[4948]: I0122 14:22:26.981226 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-h27pl" Jan 22 14:22:27 crc kubenswrapper[4948]: I0122 14:22:27.160471 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-h27pl" Jan 22 14:22:27 crc kubenswrapper[4948]: I0122 14:22:27.212215 4948 generic.go:334] "Generic (PLEG): container finished" podID="39643a3e-7da1-4e07-a5f0-1c53df079831" containerID="059bf71a51b973b2850107e64e4ad7b30e3935dc08b07d3946f7f25f431b8368" exitCode=0 Jan 22 14:22:27 crc kubenswrapper[4948]: I0122 14:22:27.212315 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kppfw" event={"ID":"39643a3e-7da1-4e07-a5f0-1c53df079831","Type":"ContainerDied","Data":"059bf71a51b973b2850107e64e4ad7b30e3935dc08b07d3946f7f25f431b8368"} Jan 22 14:22:27 crc kubenswrapper[4948]: I0122 14:22:27.251950 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-h27pl" Jan 22 14:22:27 crc kubenswrapper[4948]: I0122 14:22:27.256366 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-mkngx" Jan 22 14:22:27 crc 
kubenswrapper[4948]: I0122 14:22:27.256450 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-mkngx" Jan 22 14:22:27 crc kubenswrapper[4948]: I0122 14:22:27.305448 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-mkngx" Jan 22 14:22:27 crc kubenswrapper[4948]: I0122 14:22:27.421588 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-p5mjq" Jan 22 14:22:27 crc kubenswrapper[4948]: I0122 14:22:27.421684 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-p5mjq" Jan 22 14:22:27 crc kubenswrapper[4948]: I0122 14:22:27.466913 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-p5mjq" Jan 22 14:22:27 crc kubenswrapper[4948]: I0122 14:22:27.780789 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-vqmnb" Jan 22 14:22:27 crc kubenswrapper[4948]: I0122 14:22:27.780858 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-vqmnb" Jan 22 14:22:27 crc kubenswrapper[4948]: I0122 14:22:27.820694 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-vqmnb" Jan 22 14:22:28 crc kubenswrapper[4948]: I0122 14:22:28.262535 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-mkngx" Jan 22 14:22:28 crc kubenswrapper[4948]: I0122 14:22:28.266827 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-p5mjq" Jan 22 14:22:28 crc kubenswrapper[4948]: I0122 14:22:28.266906 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-vqmnb" Jan 22 14:22:29 crc kubenswrapper[4948]: I0122 14:22:29.170389 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wstpq" Jan 22 14:22:29 crc kubenswrapper[4948]: I0122 14:22:29.170755 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wstpq" Jan 22 14:22:29 crc kubenswrapper[4948]: I0122 14:22:29.224250 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wstpq" Jan 22 14:22:29 crc kubenswrapper[4948]: I0122 14:22:29.320485 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wstpq" Jan 22 14:22:30 crc kubenswrapper[4948]: I0122 14:22:30.195004 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vqmnb"] Jan 22 14:22:30 crc kubenswrapper[4948]: I0122 14:22:30.235202 4948 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-vqmnb" podUID="57035aac-3298-49ff-990c-06d1c53f1e42" containerName="registry-server" containerID="cri-o://0461e371deffc16af4711b28b0b3dac586f45aef58c3ca2821b7cf41f6322216" gracePeriod=2 Jan 22 14:22:30 crc kubenswrapper[4948]: I0122 14:22:30.392771 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-p5mjq"] Jan 22 
14:22:30 crc kubenswrapper[4948]: I0122 14:22:30.393072 4948 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-p5mjq" podUID="a15930ec-26ea-4d6f-b1fc-2aa252298992" containerName="registry-server" containerID="cri-o://d32dd8e67a4e5cac1bad2b90f7c2b2a2df6f42d6203e8ec832afc32a75713293" gracePeriod=2 Jan 22 14:22:32 crc kubenswrapper[4948]: I0122 14:22:32.254902 4948 generic.go:334] "Generic (PLEG): container finished" podID="57035aac-3298-49ff-990c-06d1c53f1e42" containerID="0461e371deffc16af4711b28b0b3dac586f45aef58c3ca2821b7cf41f6322216" exitCode=0 Jan 22 14:22:32 crc kubenswrapper[4948]: I0122 14:22:32.254955 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vqmnb" event={"ID":"57035aac-3298-49ff-990c-06d1c53f1e42","Type":"ContainerDied","Data":"0461e371deffc16af4711b28b0b3dac586f45aef58c3ca2821b7cf41f6322216"} Jan 22 14:22:32 crc kubenswrapper[4948]: I0122 14:22:32.259373 4948 generic.go:334] "Generic (PLEG): container finished" podID="a15930ec-26ea-4d6f-b1fc-2aa252298992" containerID="d32dd8e67a4e5cac1bad2b90f7c2b2a2df6f42d6203e8ec832afc32a75713293" exitCode=0 Jan 22 14:22:32 crc kubenswrapper[4948]: I0122 14:22:32.259401 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p5mjq" event={"ID":"a15930ec-26ea-4d6f-b1fc-2aa252298992","Type":"ContainerDied","Data":"d32dd8e67a4e5cac1bad2b90f7c2b2a2df6f42d6203e8ec832afc32a75713293"} Jan 22 14:22:32 crc kubenswrapper[4948]: I0122 14:22:32.599594 4948 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vqmnb" Jan 22 14:22:32 crc kubenswrapper[4948]: I0122 14:22:32.661099 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57035aac-3298-49ff-990c-06d1c53f1e42-utilities\") pod \"57035aac-3298-49ff-990c-06d1c53f1e42\" (UID: \"57035aac-3298-49ff-990c-06d1c53f1e42\") " Jan 22 14:22:32 crc kubenswrapper[4948]: I0122 14:22:32.661231 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57035aac-3298-49ff-990c-06d1c53f1e42-catalog-content\") pod \"57035aac-3298-49ff-990c-06d1c53f1e42\" (UID: \"57035aac-3298-49ff-990c-06d1c53f1e42\") " Jan 22 14:22:32 crc kubenswrapper[4948]: I0122 14:22:32.661290 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pklq5\" (UniqueName: \"kubernetes.io/projected/57035aac-3298-49ff-990c-06d1c53f1e42-kube-api-access-pklq5\") pod \"57035aac-3298-49ff-990c-06d1c53f1e42\" (UID: \"57035aac-3298-49ff-990c-06d1c53f1e42\") " Jan 22 14:22:32 crc kubenswrapper[4948]: I0122 14:22:32.663228 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57035aac-3298-49ff-990c-06d1c53f1e42-utilities" (OuterVolumeSpecName: "utilities") pod "57035aac-3298-49ff-990c-06d1c53f1e42" (UID: "57035aac-3298-49ff-990c-06d1c53f1e42"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:22:32 crc kubenswrapper[4948]: I0122 14:22:32.675659 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57035aac-3298-49ff-990c-06d1c53f1e42-kube-api-access-pklq5" (OuterVolumeSpecName: "kube-api-access-pklq5") pod "57035aac-3298-49ff-990c-06d1c53f1e42" (UID: "57035aac-3298-49ff-990c-06d1c53f1e42"). InnerVolumeSpecName "kube-api-access-pklq5". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:22:32 crc kubenswrapper[4948]: I0122 14:22:32.727979 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57035aac-3298-49ff-990c-06d1c53f1e42-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57035aac-3298-49ff-990c-06d1c53f1e42" (UID: "57035aac-3298-49ff-990c-06d1c53f1e42"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:22:32 crc kubenswrapper[4948]: I0122 14:22:32.764050 4948 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57035aac-3298-49ff-990c-06d1c53f1e42-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:32 crc kubenswrapper[4948]: I0122 14:22:32.764100 4948 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57035aac-3298-49ff-990c-06d1c53f1e42-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:32 crc kubenswrapper[4948]: I0122 14:22:32.764121 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pklq5\" (UniqueName: \"kubernetes.io/projected/57035aac-3298-49ff-990c-06d1c53f1e42-kube-api-access-pklq5\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:32 crc kubenswrapper[4948]: I0122 14:22:32.958600 4948 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-p5mjq" Jan 22 14:22:32 crc kubenswrapper[4948]: I0122 14:22:32.967143 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a15930ec-26ea-4d6f-b1fc-2aa252298992-utilities\") pod \"a15930ec-26ea-4d6f-b1fc-2aa252298992\" (UID: \"a15930ec-26ea-4d6f-b1fc-2aa252298992\") " Jan 22 14:22:32 crc kubenswrapper[4948]: I0122 14:22:32.967236 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pgq9g\" (UniqueName: \"kubernetes.io/projected/a15930ec-26ea-4d6f-b1fc-2aa252298992-kube-api-access-pgq9g\") pod \"a15930ec-26ea-4d6f-b1fc-2aa252298992\" (UID: \"a15930ec-26ea-4d6f-b1fc-2aa252298992\") " Jan 22 14:22:32 crc kubenswrapper[4948]: I0122 14:22:32.967426 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a15930ec-26ea-4d6f-b1fc-2aa252298992-catalog-content\") pod \"a15930ec-26ea-4d6f-b1fc-2aa252298992\" (UID: \"a15930ec-26ea-4d6f-b1fc-2aa252298992\") " Jan 22 14:22:32 crc kubenswrapper[4948]: I0122 14:22:32.970070 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a15930ec-26ea-4d6f-b1fc-2aa252298992-utilities" (OuterVolumeSpecName: "utilities") pod "a15930ec-26ea-4d6f-b1fc-2aa252298992" (UID: "a15930ec-26ea-4d6f-b1fc-2aa252298992"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:22:32 crc kubenswrapper[4948]: I0122 14:22:32.972168 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a15930ec-26ea-4d6f-b1fc-2aa252298992-kube-api-access-pgq9g" (OuterVolumeSpecName: "kube-api-access-pgq9g") pod "a15930ec-26ea-4d6f-b1fc-2aa252298992" (UID: "a15930ec-26ea-4d6f-b1fc-2aa252298992"). InnerVolumeSpecName "kube-api-access-pgq9g". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:22:32 crc kubenswrapper[4948]: I0122 14:22:32.976720 4948 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a15930ec-26ea-4d6f-b1fc-2aa252298992-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:32 crc kubenswrapper[4948]: I0122 14:22:32.976775 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pgq9g\" (UniqueName: \"kubernetes.io/projected/a15930ec-26ea-4d6f-b1fc-2aa252298992-kube-api-access-pgq9g\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:33 crc kubenswrapper[4948]: I0122 14:22:33.041318 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a15930ec-26ea-4d6f-b1fc-2aa252298992-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a15930ec-26ea-4d6f-b1fc-2aa252298992" (UID: "a15930ec-26ea-4d6f-b1fc-2aa252298992"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:22:33 crc kubenswrapper[4948]: I0122 14:22:33.077968 4948 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a15930ec-26ea-4d6f-b1fc-2aa252298992-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:33 crc kubenswrapper[4948]: I0122 14:22:33.267211 4948 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-vqmnb" Jan 22 14:22:33 crc kubenswrapper[4948]: I0122 14:22:33.267191 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-vqmnb" event={"ID":"57035aac-3298-49ff-990c-06d1c53f1e42","Type":"ContainerDied","Data":"4b30fcd584f0c87b31571e07cc5aa2cd6a5d5e71d77046554a8bdcd68341730d"} Jan 22 14:22:33 crc kubenswrapper[4948]: I0122 14:22:33.267380 4948 scope.go:117] "RemoveContainer" containerID="0461e371deffc16af4711b28b0b3dac586f45aef58c3ca2821b7cf41f6322216" Jan 22 14:22:33 crc kubenswrapper[4948]: I0122 14:22:33.270646 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-p5mjq" event={"ID":"a15930ec-26ea-4d6f-b1fc-2aa252298992","Type":"ContainerDied","Data":"751bda09835693ca05fefb7acd9a67a244f5e8ddffc2c878cafaee85130be123"} Jan 22 14:22:33 crc kubenswrapper[4948]: I0122 14:22:33.270733 4948 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-p5mjq" Jan 22 14:22:33 crc kubenswrapper[4948]: I0122 14:22:33.299926 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-p5mjq"] Jan 22 14:22:33 crc kubenswrapper[4948]: I0122 14:22:33.306212 4948 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-p5mjq"] Jan 22 14:22:33 crc kubenswrapper[4948]: I0122 14:22:33.318694 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-vqmnb"] Jan 22 14:22:33 crc kubenswrapper[4948]: I0122 14:22:33.320925 4948 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-vqmnb"] Jan 22 14:22:33 crc kubenswrapper[4948]: I0122 14:22:33.591748 4948 scope.go:117] "RemoveContainer" containerID="c5fd6959ace12fee8e0c2b531486ec6b8900be7d1a26fe245eabaaa25802412e" Jan 22 14:22:33 crc kubenswrapper[4948]: I0122 14:22:33.619960 4948 scope.go:117] "RemoveContainer" containerID="7bf21c188222e71e7ed781fefeb8091e45379e6910bb39fe0b8827ec43cc2da2" Jan 22 14:22:33 crc kubenswrapper[4948]: I0122 14:22:33.646313 4948 scope.go:117] "RemoveContainer" containerID="d32dd8e67a4e5cac1bad2b90f7c2b2a2df6f42d6203e8ec832afc32a75713293" Jan 22 14:22:33 crc kubenswrapper[4948]: I0122 14:22:33.679263 4948 scope.go:117] "RemoveContainer" containerID="787f7634f32d01088e3a358a675e62af08bab0c3f31343c95eec1d5770475c6c" Jan 22 14:22:33 crc kubenswrapper[4948]: I0122 14:22:33.743282 4948 scope.go:117] "RemoveContainer" containerID="450027f023ba817857345f3e453e532ad6b3339cf927acd216d7718aa110bc98" Jan 22 14:22:33 crc kubenswrapper[4948]: I0122 14:22:33.995212 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57035aac-3298-49ff-990c-06d1c53f1e42" path="/var/lib/kubelet/pods/57035aac-3298-49ff-990c-06d1c53f1e42/volumes" Jan 22 14:22:33 crc kubenswrapper[4948]: I0122 14:22:33.995907 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a15930ec-26ea-4d6f-b1fc-2aa252298992" path="/var/lib/kubelet/pods/a15930ec-26ea-4d6f-b1fc-2aa252298992/volumes" Jan 22 14:22:34 crc kubenswrapper[4948]: I0122 14:22:34.293873 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kppfw" event={"ID":"39643a3e-7da1-4e07-a5f0-1c53df079831","Type":"ContainerStarted","Data":"350e7d8275429767954a22032b2cf51d4e4cb7ff038192c26aff1769ece77067"} Jan 22 14:22:34 crc kubenswrapper[4948]: I0122 14:22:34.297340 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6wr9b" event={"ID":"b89355cf-e0e4-4a32-8519-c75e0a13c7ce","Type":"ContainerStarted","Data":"53a8b49e55382ec2abb1895e783e1adaa87ee681120c9d9cc97ea037b324ce82"} Jan 22 14:22:34 crc kubenswrapper[4948]: I0122 14:22:34.302310 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z4wls" event={"ID":"62e1b4fe-576f-48ed-a7ea-a94dfd1af206","Type":"ContainerStarted","Data":"b5371644deae6a58c1c591fe54d5ce252574e62d93f79b0492a0795d28c23f51"} Jan 22 14:22:34 crc kubenswrapper[4948]: I0122 14:22:34.319135 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kppfw" podStartSLOduration=2.2377631080000002 podStartE2EDuration="1m5.319110212s" podCreationTimestamp="2026-01-22 14:21:29 +0000 UTC" firstStartedPulling="2026-01-22 14:21:30.522848679 +0000 UTC m=+148.852521608" 
lastFinishedPulling="2026-01-22 14:22:33.604195773 +0000 UTC m=+211.933868712" observedRunningTime="2026-01-22 14:22:34.315405391 +0000 UTC m=+212.645078330" watchObservedRunningTime="2026-01-22 14:22:34.319110212 +0000 UTC m=+212.648783141" Jan 22 14:22:35 crc kubenswrapper[4948]: I0122 14:22:35.312466 4948 generic.go:334] "Generic (PLEG): container finished" podID="62e1b4fe-576f-48ed-a7ea-a94dfd1af206" containerID="b5371644deae6a58c1c591fe54d5ce252574e62d93f79b0492a0795d28c23f51" exitCode=0 Jan 22 14:22:35 crc kubenswrapper[4948]: I0122 14:22:35.312564 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z4wls" event={"ID":"62e1b4fe-576f-48ed-a7ea-a94dfd1af206","Type":"ContainerDied","Data":"b5371644deae6a58c1c591fe54d5ce252574e62d93f79b0492a0795d28c23f51"} Jan 22 14:22:35 crc kubenswrapper[4948]: I0122 14:22:35.314661 4948 generic.go:334] "Generic (PLEG): container finished" podID="b89355cf-e0e4-4a32-8519-c75e0a13c7ce" containerID="53a8b49e55382ec2abb1895e783e1adaa87ee681120c9d9cc97ea037b324ce82" exitCode=0 Jan 22 14:22:35 crc kubenswrapper[4948]: I0122 14:22:35.314696 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6wr9b" event={"ID":"b89355cf-e0e4-4a32-8519-c75e0a13c7ce","Type":"ContainerDied","Data":"53a8b49e55382ec2abb1895e783e1adaa87ee681120c9d9cc97ea037b324ce82"} Jan 22 14:22:36 crc kubenswrapper[4948]: I0122 14:22:36.321773 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6wr9b" event={"ID":"b89355cf-e0e4-4a32-8519-c75e0a13c7ce","Type":"ContainerStarted","Data":"f0ef47cd26be5524b10f130d9e9cfedfb112377b7b8a573dbfba72f2aea7171a"} Jan 22 14:22:36 crc kubenswrapper[4948]: I0122 14:22:36.324714 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z4wls" event={"ID":"62e1b4fe-576f-48ed-a7ea-a94dfd1af206","Type":"ContainerStarted","Data":"7d8c0503decf0d8c91b32565390e34c1c41852f69617690cfc844c67b2f28156"} Jan 22 14:22:36 crc kubenswrapper[4948]: I0122 14:22:36.344056 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-6wr9b" podStartSLOduration=3.221377952 podStartE2EDuration="1m7.344039753s" podCreationTimestamp="2026-01-22 14:21:29 +0000 UTC" firstStartedPulling="2026-01-22 14:21:31.621635162 +0000 UTC m=+149.951308091" lastFinishedPulling="2026-01-22 14:22:35.744296953 +0000 UTC m=+214.073969892" observedRunningTime="2026-01-22 14:22:36.343442028 +0000 UTC m=+214.673114997" watchObservedRunningTime="2026-01-22 14:22:36.344039753 +0000 UTC m=+214.673712682" Jan 22 14:22:36 crc kubenswrapper[4948]: I0122 14:22:36.370387 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-z4wls" podStartSLOduration=3.348287285 podStartE2EDuration="1m6.370363571s" podCreationTimestamp="2026-01-22 14:21:30 +0000 UTC" firstStartedPulling="2026-01-22 14:21:32.704329789 +0000 UTC m=+151.034002718" lastFinishedPulling="2026-01-22 14:22:35.726406075 +0000 UTC m=+214.056079004" observedRunningTime="2026-01-22 14:22:36.364857011 +0000 UTC m=+214.694529940" watchObservedRunningTime="2026-01-22 14:22:36.370363571 +0000 UTC m=+214.700036530" Jan 22 14:22:39 crc kubenswrapper[4948]: I0122 14:22:39.282783 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zq2mw"] Jan 22 14:22:39 crc kubenswrapper[4948]: I0122 14:22:39.555990 4948 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kppfw" Jan 22 14:22:39 crc kubenswrapper[4948]: I0122 14:22:39.556061 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kppfw" Jan 22 14:22:39 crc kubenswrapper[4948]: I0122 14:22:39.594585 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kppfw" Jan 22 14:22:40 crc kubenswrapper[4948]: I0122 14:22:40.206873 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-6wr9b" Jan 22 14:22:40 crc kubenswrapper[4948]: I0122 14:22:40.206942 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-6wr9b" Jan 22 14:22:40 crc kubenswrapper[4948]: I0122 14:22:40.401375 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kppfw" Jan 22 14:22:40 crc kubenswrapper[4948]: I0122 14:22:40.551633 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-z4wls" Jan 22 14:22:40 crc kubenswrapper[4948]: I0122 14:22:40.551749 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-z4wls" Jan 22 14:22:41 crc kubenswrapper[4948]: I0122 14:22:41.261580 4948 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-6wr9b" podUID="b89355cf-e0e4-4a32-8519-c75e0a13c7ce" containerName="registry-server" probeResult="failure" output=< Jan 22 14:22:41 crc kubenswrapper[4948]: timeout: failed to connect service ":50051" within 1s Jan 22 14:22:41 crc kubenswrapper[4948]: > Jan 22 14:22:41 crc kubenswrapper[4948]: I0122 14:22:41.415141 4948 patch_prober.go:28] interesting pod/machine-config-daemon-ldzzt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:22:41 crc kubenswrapper[4948]: I0122 14:22:41.415230 4948 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" podUID="355ee4ab-8609-4b0c-ad77-bff335632d2d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:22:41 crc kubenswrapper[4948]: I0122 14:22:41.415298 4948 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" Jan 22 14:22:41 crc kubenswrapper[4948]: I0122 14:22:41.416218 4948 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1"} pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Jan 22 14:22:41 crc kubenswrapper[4948]: I0122 14:22:41.416361 4948 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" podUID="355ee4ab-8609-4b0c-ad77-bff335632d2d" containerName="machine-config-daemon" 
containerID="cri-o://a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1" gracePeriod=600 Jan 22 14:22:41 crc kubenswrapper[4948]: I0122 14:22:41.612888 4948 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-z4wls" podUID="62e1b4fe-576f-48ed-a7ea-a94dfd1af206" containerName="registry-server" probeResult="failure" output=< Jan 22 14:22:41 crc kubenswrapper[4948]: timeout: failed to connect service ":50051" within 1s Jan 22 14:22:41 crc kubenswrapper[4948]: > Jan 22 14:22:42 crc kubenswrapper[4948]: I0122 14:22:42.828562 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k"] Jan 22 14:22:42 crc kubenswrapper[4948]: I0122 14:22:42.828826 4948 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k" podUID="399eaa47-b30b-4736-90e1-5f888a9a8e66" containerName="controller-manager" containerID="cri-o://d797555b64b714a8d6bd32de4911fdc1b018498670356fdb232795bb0e3e0fd6" gracePeriod=30 Jan 22 14:22:42 crc kubenswrapper[4948]: I0122 14:22:42.906913 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f4676974b-5tz95"] Jan 22 14:22:42 crc kubenswrapper[4948]: I0122 14:22:42.907232 4948 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-7f4676974b-5tz95" podUID="8f8f939e-3201-4147-9768-6e087b892afd" containerName="route-controller-manager" containerID="cri-o://788a1b63babb2e046df8adf30169aeca27e02765edc23c7f1ca7ddcdaf5d2c49" gracePeriod=30 Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.188794 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kppfw"] Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.189368 4948 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-kppfw" podUID="39643a3e-7da1-4e07-a5f0-1c53df079831" containerName="registry-server" containerID="cri-o://350e7d8275429767954a22032b2cf51d4e4cb7ff038192c26aff1769ece77067" gracePeriod=2 Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.367528 4948 generic.go:334] "Generic (PLEG): container finished" podID="8f8f939e-3201-4147-9768-6e087b892afd" containerID="788a1b63babb2e046df8adf30169aeca27e02765edc23c7f1ca7ddcdaf5d2c49" exitCode=0 Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.367652 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7f4676974b-5tz95" event={"ID":"8f8f939e-3201-4147-9768-6e087b892afd","Type":"ContainerDied","Data":"788a1b63babb2e046df8adf30169aeca27e02765edc23c7f1ca7ddcdaf5d2c49"} Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.374516 4948 generic.go:334] "Generic (PLEG): container finished" podID="399eaa47-b30b-4736-90e1-5f888a9a8e66" containerID="d797555b64b714a8d6bd32de4911fdc1b018498670356fdb232795bb0e3e0fd6" exitCode=0 Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.374649 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k" event={"ID":"399eaa47-b30b-4736-90e1-5f888a9a8e66","Type":"ContainerDied","Data":"d797555b64b714a8d6bd32de4911fdc1b018498670356fdb232795bb0e3e0fd6"} Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.377716 4948 generic.go:334] "Generic 
(PLEG): container finished" podID="355ee4ab-8609-4b0c-ad77-bff335632d2d" containerID="a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1" exitCode=0 Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.377810 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" event={"ID":"355ee4ab-8609-4b0c-ad77-bff335632d2d","Type":"ContainerDied","Data":"a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1"} Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.380554 4948 generic.go:334] "Generic (PLEG): container finished" podID="39643a3e-7da1-4e07-a5f0-1c53df079831" containerID="350e7d8275429767954a22032b2cf51d4e4cb7ff038192c26aff1769ece77067" exitCode=0 Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.380582 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kppfw" event={"ID":"39643a3e-7da1-4e07-a5f0-1c53df079831","Type":"ContainerDied","Data":"350e7d8275429767954a22032b2cf51d4e4cb7ff038192c26aff1769ece77067"} Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.487910 4948 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f4676974b-5tz95" Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.493887 4948 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k" Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.617031 4948 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kppfw" Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.648947 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f8f939e-3201-4147-9768-6e087b892afd-serving-cert\") pod \"8f8f939e-3201-4147-9768-6e087b892afd\" (UID: \"8f8f939e-3201-4147-9768-6e087b892afd\") " Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.649006 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8f8f939e-3201-4147-9768-6e087b892afd-client-ca\") pod \"8f8f939e-3201-4147-9768-6e087b892afd\" (UID: \"8f8f939e-3201-4147-9768-6e087b892afd\") " Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.649056 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39643a3e-7da1-4e07-a5f0-1c53df079831-catalog-content\") pod \"39643a3e-7da1-4e07-a5f0-1c53df079831\" (UID: \"39643a3e-7da1-4e07-a5f0-1c53df079831\") " Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.649082 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/399eaa47-b30b-4736-90e1-5f888a9a8e66-serving-cert\") pod \"399eaa47-b30b-4736-90e1-5f888a9a8e66\" (UID: \"399eaa47-b30b-4736-90e1-5f888a9a8e66\") " Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.649103 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q2p6v\" (UniqueName: \"kubernetes.io/projected/8f8f939e-3201-4147-9768-6e087b892afd-kube-api-access-q2p6v\") pod \"8f8f939e-3201-4147-9768-6e087b892afd\" (UID: \"8f8f939e-3201-4147-9768-6e087b892afd\") " Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.649162 4948 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/399eaa47-b30b-4736-90e1-5f888a9a8e66-config\") pod \"399eaa47-b30b-4736-90e1-5f888a9a8e66\" (UID: \"399eaa47-b30b-4736-90e1-5f888a9a8e66\") " Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.649853 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f8f939e-3201-4147-9768-6e087b892afd-client-ca" (OuterVolumeSpecName: "client-ca") pod "8f8f939e-3201-4147-9768-6e087b892afd" (UID: "8f8f939e-3201-4147-9768-6e087b892afd"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.649962 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/399eaa47-b30b-4736-90e1-5f888a9a8e66-config" (OuterVolumeSpecName: "config") pod "399eaa47-b30b-4736-90e1-5f888a9a8e66" (UID: "399eaa47-b30b-4736-90e1-5f888a9a8e66"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.649185 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d28fw\" (UniqueName: \"kubernetes.io/projected/399eaa47-b30b-4736-90e1-5f888a9a8e66-kube-api-access-d28fw\") pod \"399eaa47-b30b-4736-90e1-5f888a9a8e66\" (UID: \"399eaa47-b30b-4736-90e1-5f888a9a8e66\") " Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.650086 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xg5mq\" (UniqueName: \"kubernetes.io/projected/39643a3e-7da1-4e07-a5f0-1c53df079831-kube-api-access-xg5mq\") pod \"39643a3e-7da1-4e07-a5f0-1c53df079831\" (UID: \"39643a3e-7da1-4e07-a5f0-1c53df079831\") " Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.650419 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f8f939e-3201-4147-9768-6e087b892afd-config\") pod \"8f8f939e-3201-4147-9768-6e087b892afd\" (UID: \"8f8f939e-3201-4147-9768-6e087b892afd\") " Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.650451 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39643a3e-7da1-4e07-a5f0-1c53df079831-utilities\") pod \"39643a3e-7da1-4e07-a5f0-1c53df079831\" (UID: \"39643a3e-7da1-4e07-a5f0-1c53df079831\") " Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.650474 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/399eaa47-b30b-4736-90e1-5f888a9a8e66-proxy-ca-bundles\") pod \"399eaa47-b30b-4736-90e1-5f888a9a8e66\" (UID: \"399eaa47-b30b-4736-90e1-5f888a9a8e66\") " Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.650494 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/399eaa47-b30b-4736-90e1-5f888a9a8e66-client-ca\") pod \"399eaa47-b30b-4736-90e1-5f888a9a8e66\" (UID: \"399eaa47-b30b-4736-90e1-5f888a9a8e66\") " Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.650922 4948 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/399eaa47-b30b-4736-90e1-5f888a9a8e66-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.650951 4948 
reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/8f8f939e-3201-4147-9768-6e087b892afd-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.651050 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f8f939e-3201-4147-9768-6e087b892afd-config" (OuterVolumeSpecName: "config") pod "8f8f939e-3201-4147-9768-6e087b892afd" (UID: "8f8f939e-3201-4147-9768-6e087b892afd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.651216 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39643a3e-7da1-4e07-a5f0-1c53df079831-utilities" (OuterVolumeSpecName: "utilities") pod "39643a3e-7da1-4e07-a5f0-1c53df079831" (UID: "39643a3e-7da1-4e07-a5f0-1c53df079831"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.651329 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/399eaa47-b30b-4736-90e1-5f888a9a8e66-client-ca" (OuterVolumeSpecName: "client-ca") pod "399eaa47-b30b-4736-90e1-5f888a9a8e66" (UID: "399eaa47-b30b-4736-90e1-5f888a9a8e66"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.651702 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/399eaa47-b30b-4736-90e1-5f888a9a8e66-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "399eaa47-b30b-4736-90e1-5f888a9a8e66" (UID: "399eaa47-b30b-4736-90e1-5f888a9a8e66"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.655505 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/399eaa47-b30b-4736-90e1-5f888a9a8e66-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "399eaa47-b30b-4736-90e1-5f888a9a8e66" (UID: "399eaa47-b30b-4736-90e1-5f888a9a8e66"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.657901 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39643a3e-7da1-4e07-a5f0-1c53df079831-kube-api-access-xg5mq" (OuterVolumeSpecName: "kube-api-access-xg5mq") pod "39643a3e-7da1-4e07-a5f0-1c53df079831" (UID: "39643a3e-7da1-4e07-a5f0-1c53df079831"). InnerVolumeSpecName "kube-api-access-xg5mq". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.658194 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/399eaa47-b30b-4736-90e1-5f888a9a8e66-kube-api-access-d28fw" (OuterVolumeSpecName: "kube-api-access-d28fw") pod "399eaa47-b30b-4736-90e1-5f888a9a8e66" (UID: "399eaa47-b30b-4736-90e1-5f888a9a8e66"). InnerVolumeSpecName "kube-api-access-d28fw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.661786 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f8f939e-3201-4147-9768-6e087b892afd-kube-api-access-q2p6v" (OuterVolumeSpecName: "kube-api-access-q2p6v") pod "8f8f939e-3201-4147-9768-6e087b892afd" (UID: "8f8f939e-3201-4147-9768-6e087b892afd"). InnerVolumeSpecName "kube-api-access-q2p6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.661968 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f8f939e-3201-4147-9768-6e087b892afd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8f8f939e-3201-4147-9768-6e087b892afd" (UID: "8f8f939e-3201-4147-9768-6e087b892afd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.681404 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39643a3e-7da1-4e07-a5f0-1c53df079831-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "39643a3e-7da1-4e07-a5f0-1c53df079831" (UID: "39643a3e-7da1-4e07-a5f0-1c53df079831"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.751334 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d28fw\" (UniqueName: \"kubernetes.io/projected/399eaa47-b30b-4736-90e1-5f888a9a8e66-kube-api-access-d28fw\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.751368 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xg5mq\" (UniqueName: \"kubernetes.io/projected/39643a3e-7da1-4e07-a5f0-1c53df079831-kube-api-access-xg5mq\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.751377 4948 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8f8f939e-3201-4147-9768-6e087b892afd-config\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.751386 4948 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39643a3e-7da1-4e07-a5f0-1c53df079831-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.751396 4948 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/399eaa47-b30b-4736-90e1-5f888a9a8e66-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.751405 4948 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/399eaa47-b30b-4736-90e1-5f888a9a8e66-client-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.751413 4948 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8f8f939e-3201-4147-9768-6e087b892afd-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.751421 4948 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39643a3e-7da1-4e07-a5f0-1c53df079831-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:43 crc 
kubenswrapper[4948]: I0122 14:22:43.751428 4948 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/399eaa47-b30b-4736-90e1-5f888a9a8e66-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:43 crc kubenswrapper[4948]: I0122 14:22:43.751436 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q2p6v\" (UniqueName: \"kubernetes.io/projected/8f8f939e-3201-4147-9768-6e087b892afd-kube-api-access-q2p6v\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.255442 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6cb5c7cf9d-89xd6"] Jan 22 14:22:44 crc kubenswrapper[4948]: E0122 14:22:44.255921 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39643a3e-7da1-4e07-a5f0-1c53df079831" containerName="extract-utilities" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.255954 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="39643a3e-7da1-4e07-a5f0-1c53df079831" containerName="extract-utilities" Jan 22 14:22:44 crc kubenswrapper[4948]: E0122 14:22:44.255964 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a15930ec-26ea-4d6f-b1fc-2aa252298992" containerName="extract-utilities" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.255970 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="a15930ec-26ea-4d6f-b1fc-2aa252298992" containerName="extract-utilities" Jan 22 14:22:44 crc kubenswrapper[4948]: E0122 14:22:44.255981 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a15930ec-26ea-4d6f-b1fc-2aa252298992" containerName="extract-content" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.255987 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="a15930ec-26ea-4d6f-b1fc-2aa252298992" containerName="extract-content" Jan 22 14:22:44 crc kubenswrapper[4948]: E0122 14:22:44.255994 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57035aac-3298-49ff-990c-06d1c53f1e42" containerName="extract-utilities" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.256000 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="57035aac-3298-49ff-990c-06d1c53f1e42" containerName="extract-utilities" Jan 22 14:22:44 crc kubenswrapper[4948]: E0122 14:22:44.256024 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="399eaa47-b30b-4736-90e1-5f888a9a8e66" containerName="controller-manager" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.256030 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="399eaa47-b30b-4736-90e1-5f888a9a8e66" containerName="controller-manager" Jan 22 14:22:44 crc kubenswrapper[4948]: E0122 14:22:44.256039 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57035aac-3298-49ff-990c-06d1c53f1e42" containerName="registry-server" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.256044 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="57035aac-3298-49ff-990c-06d1c53f1e42" containerName="registry-server" Jan 22 14:22:44 crc kubenswrapper[4948]: E0122 14:22:44.256051 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39643a3e-7da1-4e07-a5f0-1c53df079831" containerName="extract-content" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.256057 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="39643a3e-7da1-4e07-a5f0-1c53df079831" containerName="extract-content" Jan 22 14:22:44 crc kubenswrapper[4948]: E0122 
14:22:44.256064 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39643a3e-7da1-4e07-a5f0-1c53df079831" containerName="registry-server" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.256070 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="39643a3e-7da1-4e07-a5f0-1c53df079831" containerName="registry-server" Jan 22 14:22:44 crc kubenswrapper[4948]: E0122 14:22:44.256078 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a15930ec-26ea-4d6f-b1fc-2aa252298992" containerName="registry-server" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.256084 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="a15930ec-26ea-4d6f-b1fc-2aa252298992" containerName="registry-server" Jan 22 14:22:44 crc kubenswrapper[4948]: E0122 14:22:44.256098 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8f8f939e-3201-4147-9768-6e087b892afd" containerName="route-controller-manager" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.256104 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="8f8f939e-3201-4147-9768-6e087b892afd" containerName="route-controller-manager" Jan 22 14:22:44 crc kubenswrapper[4948]: E0122 14:22:44.256115 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57035aac-3298-49ff-990c-06d1c53f1e42" containerName="extract-content" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.256121 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="57035aac-3298-49ff-990c-06d1c53f1e42" containerName="extract-content" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.256208 4948 memory_manager.go:354] "RemoveStaleState removing state" podUID="399eaa47-b30b-4736-90e1-5f888a9a8e66" containerName="controller-manager" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.256220 4948 memory_manager.go:354] "RemoveStaleState removing state" podUID="8f8f939e-3201-4147-9768-6e087b892afd" containerName="route-controller-manager" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.256233 4948 memory_manager.go:354] "RemoveStaleState removing state" podUID="57035aac-3298-49ff-990c-06d1c53f1e42" containerName="registry-server" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.256241 4948 memory_manager.go:354] "RemoveStaleState removing state" podUID="39643a3e-7da1-4e07-a5f0-1c53df079831" containerName="registry-server" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.256250 4948 memory_manager.go:354] "RemoveStaleState removing state" podUID="a15930ec-26ea-4d6f-b1fc-2aa252298992" containerName="registry-server" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.256628 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6cb5c7cf9d-89xd6" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.261660 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7c677c5d58-s7gxt"] Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.262378 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7c677c5d58-s7gxt" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.266695 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6cb5c7cf9d-89xd6"] Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.274264 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7c677c5d58-s7gxt"] Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.360366 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a06bca1c-05b1-40e3-b880-ee8b1124af9c-proxy-ca-bundles\") pod \"controller-manager-6cb5c7cf9d-89xd6\" (UID: \"a06bca1c-05b1-40e3-b880-ee8b1124af9c\") " pod="openshift-controller-manager/controller-manager-6cb5c7cf9d-89xd6" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.360433 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a06bca1c-05b1-40e3-b880-ee8b1124af9c-config\") pod \"controller-manager-6cb5c7cf9d-89xd6\" (UID: \"a06bca1c-05b1-40e3-b880-ee8b1124af9c\") " pod="openshift-controller-manager/controller-manager-6cb5c7cf9d-89xd6" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.361186 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a06bca1c-05b1-40e3-b880-ee8b1124af9c-client-ca\") pod \"controller-manager-6cb5c7cf9d-89xd6\" (UID: \"a06bca1c-05b1-40e3-b880-ee8b1124af9c\") " pod="openshift-controller-manager/controller-manager-6cb5c7cf9d-89xd6" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.361247 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p66g8\" (UniqueName: \"kubernetes.io/projected/a06bca1c-05b1-40e3-b880-ee8b1124af9c-kube-api-access-p66g8\") pod \"controller-manager-6cb5c7cf9d-89xd6\" (UID: \"a06bca1c-05b1-40e3-b880-ee8b1124af9c\") " pod="openshift-controller-manager/controller-manager-6cb5c7cf9d-89xd6" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.361311 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a06bca1c-05b1-40e3-b880-ee8b1124af9c-serving-cert\") pod \"controller-manager-6cb5c7cf9d-89xd6\" (UID: \"a06bca1c-05b1-40e3-b880-ee8b1124af9c\") " pod="openshift-controller-manager/controller-manager-6cb5c7cf9d-89xd6" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.388375 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7f4676974b-5tz95" event={"ID":"8f8f939e-3201-4147-9768-6e087b892afd","Type":"ContainerDied","Data":"20cf9a85c7c2d0ad47282034cef14605f96b6d2abf2be66383ba8c482be5d854"} Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.388410 4948 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7f4676974b-5tz95" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.388436 4948 scope.go:117] "RemoveContainer" containerID="788a1b63babb2e046df8adf30169aeca27e02765edc23c7f1ca7ddcdaf5d2c49" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.390560 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k" event={"ID":"399eaa47-b30b-4736-90e1-5f888a9a8e66","Type":"ContainerDied","Data":"c0324a23be9aeb4fe8820234021db29f4c005961766854944cd4effe9a3aaac5"} Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.390733 4948 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.392796 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" event={"ID":"355ee4ab-8609-4b0c-ad77-bff335632d2d","Type":"ContainerStarted","Data":"836de0e5942dea29937e5a32fc50b884228d1d0096c1c8709d05b93b6a8d1e77"} Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.397516 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kppfw" event={"ID":"39643a3e-7da1-4e07-a5f0-1c53df079831","Type":"ContainerDied","Data":"cdb048502199d8b2d4052e8f6b288cc023f929d7977ee1dc58313a236f48dd12"} Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.397563 4948 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kppfw" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.409079 4948 scope.go:117] "RemoveContainer" containerID="d797555b64b714a8d6bd32de4911fdc1b018498670356fdb232795bb0e3e0fd6" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.410364 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f4676974b-5tz95"] Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.412917 4948 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7f4676974b-5tz95"] Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.426177 4948 scope.go:117] "RemoveContainer" containerID="350e7d8275429767954a22032b2cf51d4e4cb7ff038192c26aff1769ece77067" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.452399 4948 scope.go:117] "RemoveContainer" containerID="059bf71a51b973b2850107e64e4ad7b30e3935dc08b07d3946f7f25f431b8368" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.456285 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-kppfw"] Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.458549 4948 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-kppfw"] Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.465377 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p66g8\" (UniqueName: \"kubernetes.io/projected/a06bca1c-05b1-40e3-b880-ee8b1124af9c-kube-api-access-p66g8\") pod \"controller-manager-6cb5c7cf9d-89xd6\" (UID: \"a06bca1c-05b1-40e3-b880-ee8b1124af9c\") " pod="openshift-controller-manager/controller-manager-6cb5c7cf9d-89xd6" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.465436 4948 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5b65e62b-30f5-4b14-a610-6aea234d41b0-serving-cert\") pod \"route-controller-manager-7c677c5d58-s7gxt\" (UID: \"5b65e62b-30f5-4b14-a610-6aea234d41b0\") " pod="openshift-route-controller-manager/route-controller-manager-7c677c5d58-s7gxt" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.465463 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a06bca1c-05b1-40e3-b880-ee8b1124af9c-serving-cert\") pod \"controller-manager-6cb5c7cf9d-89xd6\" (UID: \"a06bca1c-05b1-40e3-b880-ee8b1124af9c\") " pod="openshift-controller-manager/controller-manager-6cb5c7cf9d-89xd6" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.465486 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ckdjn\" (UniqueName: \"kubernetes.io/projected/5b65e62b-30f5-4b14-a610-6aea234d41b0-kube-api-access-ckdjn\") pod \"route-controller-manager-7c677c5d58-s7gxt\" (UID: \"5b65e62b-30f5-4b14-a610-6aea234d41b0\") " pod="openshift-route-controller-manager/route-controller-manager-7c677c5d58-s7gxt" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.465520 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b65e62b-30f5-4b14-a610-6aea234d41b0-config\") pod \"route-controller-manager-7c677c5d58-s7gxt\" (UID: \"5b65e62b-30f5-4b14-a610-6aea234d41b0\") " pod="openshift-route-controller-manager/route-controller-manager-7c677c5d58-s7gxt" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.465544 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a06bca1c-05b1-40e3-b880-ee8b1124af9c-proxy-ca-bundles\") pod \"controller-manager-6cb5c7cf9d-89xd6\" (UID: \"a06bca1c-05b1-40e3-b880-ee8b1124af9c\") " pod="openshift-controller-manager/controller-manager-6cb5c7cf9d-89xd6" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.465566 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a06bca1c-05b1-40e3-b880-ee8b1124af9c-config\") pod \"controller-manager-6cb5c7cf9d-89xd6\" (UID: \"a06bca1c-05b1-40e3-b880-ee8b1124af9c\") " pod="openshift-controller-manager/controller-manager-6cb5c7cf9d-89xd6" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.465592 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5b65e62b-30f5-4b14-a610-6aea234d41b0-client-ca\") pod \"route-controller-manager-7c677c5d58-s7gxt\" (UID: \"5b65e62b-30f5-4b14-a610-6aea234d41b0\") " pod="openshift-route-controller-manager/route-controller-manager-7c677c5d58-s7gxt" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.465616 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a06bca1c-05b1-40e3-b880-ee8b1124af9c-client-ca\") pod \"controller-manager-6cb5c7cf9d-89xd6\" (UID: \"a06bca1c-05b1-40e3-b880-ee8b1124af9c\") " pod="openshift-controller-manager/controller-manager-6cb5c7cf9d-89xd6" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.466519 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/a06bca1c-05b1-40e3-b880-ee8b1124af9c-client-ca\") pod \"controller-manager-6cb5c7cf9d-89xd6\" (UID: \"a06bca1c-05b1-40e3-b880-ee8b1124af9c\") " pod="openshift-controller-manager/controller-manager-6cb5c7cf9d-89xd6" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.467250 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k"] Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.468128 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/a06bca1c-05b1-40e3-b880-ee8b1124af9c-proxy-ca-bundles\") pod \"controller-manager-6cb5c7cf9d-89xd6\" (UID: \"a06bca1c-05b1-40e3-b880-ee8b1124af9c\") " pod="openshift-controller-manager/controller-manager-6cb5c7cf9d-89xd6" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.468518 4948 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-5c66c6d6cd-swp8k"] Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.469199 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a06bca1c-05b1-40e3-b880-ee8b1124af9c-config\") pod \"controller-manager-6cb5c7cf9d-89xd6\" (UID: \"a06bca1c-05b1-40e3-b880-ee8b1124af9c\") " pod="openshift-controller-manager/controller-manager-6cb5c7cf9d-89xd6" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.469344 4948 scope.go:117] "RemoveContainer" containerID="fb44e493f473531da958e4d3697468f3c1f1c9be95ca622a92c806fd949955f1" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.471517 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a06bca1c-05b1-40e3-b880-ee8b1124af9c-serving-cert\") pod \"controller-manager-6cb5c7cf9d-89xd6\" (UID: \"a06bca1c-05b1-40e3-b880-ee8b1124af9c\") " pod="openshift-controller-manager/controller-manager-6cb5c7cf9d-89xd6" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.485708 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p66g8\" (UniqueName: \"kubernetes.io/projected/a06bca1c-05b1-40e3-b880-ee8b1124af9c-kube-api-access-p66g8\") pod \"controller-manager-6cb5c7cf9d-89xd6\" (UID: \"a06bca1c-05b1-40e3-b880-ee8b1124af9c\") " pod="openshift-controller-manager/controller-manager-6cb5c7cf9d-89xd6" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.566521 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5b65e62b-30f5-4b14-a610-6aea234d41b0-client-ca\") pod \"route-controller-manager-7c677c5d58-s7gxt\" (UID: \"5b65e62b-30f5-4b14-a610-6aea234d41b0\") " pod="openshift-route-controller-manager/route-controller-manager-7c677c5d58-s7gxt" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.566639 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5b65e62b-30f5-4b14-a610-6aea234d41b0-serving-cert\") pod \"route-controller-manager-7c677c5d58-s7gxt\" (UID: \"5b65e62b-30f5-4b14-a610-6aea234d41b0\") " pod="openshift-route-controller-manager/route-controller-manager-7c677c5d58-s7gxt" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.566678 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ckdjn\" (UniqueName: 
\"kubernetes.io/projected/5b65e62b-30f5-4b14-a610-6aea234d41b0-kube-api-access-ckdjn\") pod \"route-controller-manager-7c677c5d58-s7gxt\" (UID: \"5b65e62b-30f5-4b14-a610-6aea234d41b0\") " pod="openshift-route-controller-manager/route-controller-manager-7c677c5d58-s7gxt" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.567078 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b65e62b-30f5-4b14-a610-6aea234d41b0-config\") pod \"route-controller-manager-7c677c5d58-s7gxt\" (UID: \"5b65e62b-30f5-4b14-a610-6aea234d41b0\") " pod="openshift-route-controller-manager/route-controller-manager-7c677c5d58-s7gxt" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.567987 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5b65e62b-30f5-4b14-a610-6aea234d41b0-client-ca\") pod \"route-controller-manager-7c677c5d58-s7gxt\" (UID: \"5b65e62b-30f5-4b14-a610-6aea234d41b0\") " pod="openshift-route-controller-manager/route-controller-manager-7c677c5d58-s7gxt" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.568188 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5b65e62b-30f5-4b14-a610-6aea234d41b0-config\") pod \"route-controller-manager-7c677c5d58-s7gxt\" (UID: \"5b65e62b-30f5-4b14-a610-6aea234d41b0\") " pod="openshift-route-controller-manager/route-controller-manager-7c677c5d58-s7gxt" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.571512 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5b65e62b-30f5-4b14-a610-6aea234d41b0-serving-cert\") pod \"route-controller-manager-7c677c5d58-s7gxt\" (UID: \"5b65e62b-30f5-4b14-a610-6aea234d41b0\") " pod="openshift-route-controller-manager/route-controller-manager-7c677c5d58-s7gxt" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.590408 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ckdjn\" (UniqueName: \"kubernetes.io/projected/5b65e62b-30f5-4b14-a610-6aea234d41b0-kube-api-access-ckdjn\") pod \"route-controller-manager-7c677c5d58-s7gxt\" (UID: \"5b65e62b-30f5-4b14-a610-6aea234d41b0\") " pod="openshift-route-controller-manager/route-controller-manager-7c677c5d58-s7gxt" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.604068 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6cb5c7cf9d-89xd6" Jan 22 14:22:44 crc kubenswrapper[4948]: I0122 14:22:44.614511 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-7c677c5d58-s7gxt" Jan 22 14:22:45 crc kubenswrapper[4948]: I0122 14:22:45.040448 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6cb5c7cf9d-89xd6"] Jan 22 14:22:45 crc kubenswrapper[4948]: W0122 14:22:45.054734 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda06bca1c_05b1_40e3_b880_ee8b1124af9c.slice/crio-fcde50380da78b75185f62dfdb7705b43896dea721f00b7a326ad6e580d451be WatchSource:0}: Error finding container fcde50380da78b75185f62dfdb7705b43896dea721f00b7a326ad6e580d451be: Status 404 returned error can't find the container with id fcde50380da78b75185f62dfdb7705b43896dea721f00b7a326ad6e580d451be Jan 22 14:22:45 crc kubenswrapper[4948]: I0122 14:22:45.089108 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-7c677c5d58-s7gxt"] Jan 22 14:22:45 crc kubenswrapper[4948]: I0122 14:22:45.406583 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6cb5c7cf9d-89xd6" event={"ID":"a06bca1c-05b1-40e3-b880-ee8b1124af9c","Type":"ContainerStarted","Data":"fcde50380da78b75185f62dfdb7705b43896dea721f00b7a326ad6e580d451be"} Jan 22 14:22:45 crc kubenswrapper[4948]: I0122 14:22:45.410254 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7c677c5d58-s7gxt" event={"ID":"5b65e62b-30f5-4b14-a610-6aea234d41b0","Type":"ContainerStarted","Data":"6dface440dbeecd9a26f9421b478b19ebe1d0958ac95bdac85e76db6cb29b505"} Jan 22 14:22:45 crc kubenswrapper[4948]: I0122 14:22:45.964865 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39643a3e-7da1-4e07-a5f0-1c53df079831" path="/var/lib/kubelet/pods/39643a3e-7da1-4e07-a5f0-1c53df079831/volumes" Jan 22 14:22:45 crc kubenswrapper[4948]: I0122 14:22:45.965695 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="399eaa47-b30b-4736-90e1-5f888a9a8e66" path="/var/lib/kubelet/pods/399eaa47-b30b-4736-90e1-5f888a9a8e66/volumes" Jan 22 14:22:45 crc kubenswrapper[4948]: I0122 14:22:45.966268 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f8f939e-3201-4147-9768-6e087b892afd" path="/var/lib/kubelet/pods/8f8f939e-3201-4147-9768-6e087b892afd/volumes" Jan 22 14:22:47 crc kubenswrapper[4948]: I0122 14:22:47.424381 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-7c677c5d58-s7gxt" event={"ID":"5b65e62b-30f5-4b14-a610-6aea234d41b0","Type":"ContainerStarted","Data":"fc4c129d3b24a8c98346131fe9cae8fdf4dcc86d0a7622afee7dcd40fb955628"} Jan 22 14:22:47 crc kubenswrapper[4948]: I0122 14:22:47.424706 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-7c677c5d58-s7gxt" Jan 22 14:22:47 crc kubenswrapper[4948]: I0122 14:22:47.425675 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6cb5c7cf9d-89xd6" event={"ID":"a06bca1c-05b1-40e3-b880-ee8b1124af9c","Type":"ContainerStarted","Data":"0c8c4b68557c260ca0207755bea841c1f2b040d5752c71b046873f8f1e78d787"} Jan 22 14:22:47 crc kubenswrapper[4948]: I0122 14:22:47.426248 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-controller-manager/controller-manager-6cb5c7cf9d-89xd6" Jan 22 14:22:47 crc kubenswrapper[4948]: I0122 14:22:47.432567 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6cb5c7cf9d-89xd6" Jan 22 14:22:47 crc kubenswrapper[4948]: I0122 14:22:47.440900 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-7c677c5d58-s7gxt" Jan 22 14:22:47 crc kubenswrapper[4948]: I0122 14:22:47.460818 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-7c677c5d58-s7gxt" podStartSLOduration=5.460797122 podStartE2EDuration="5.460797122s" podCreationTimestamp="2026-01-22 14:22:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:22:47.457392549 +0000 UTC m=+225.787065488" watchObservedRunningTime="2026-01-22 14:22:47.460797122 +0000 UTC m=+225.790470061" Jan 22 14:22:47 crc kubenswrapper[4948]: I0122 14:22:47.541846 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6cb5c7cf9d-89xd6" podStartSLOduration=5.5418227 podStartE2EDuration="5.5418227s" podCreationTimestamp="2026-01-22 14:22:42 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:22:47.539869966 +0000 UTC m=+225.869542915" watchObservedRunningTime="2026-01-22 14:22:47.5418227 +0000 UTC m=+225.871495629" Jan 22 14:22:50 crc kubenswrapper[4948]: I0122 14:22:50.273074 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-6wr9b" Jan 22 14:22:50 crc kubenswrapper[4948]: I0122 14:22:50.319772 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-6wr9b" Jan 22 14:22:50 crc kubenswrapper[4948]: I0122 14:22:50.607788 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-z4wls" Jan 22 14:22:50 crc kubenswrapper[4948]: I0122 14:22:50.659859 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-z4wls" Jan 22 14:22:50 crc kubenswrapper[4948]: I0122 14:22:50.986712 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-z4wls"] Jan 22 14:22:52 crc kubenswrapper[4948]: I0122 14:22:52.456664 4948 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-z4wls" podUID="62e1b4fe-576f-48ed-a7ea-a94dfd1af206" containerName="registry-server" containerID="cri-o://7d8c0503decf0d8c91b32565390e34c1c41852f69617690cfc844c67b2f28156" gracePeriod=2 Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.103057 4948 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-z4wls" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.201647 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62e1b4fe-576f-48ed-a7ea-a94dfd1af206-catalog-content\") pod \"62e1b4fe-576f-48ed-a7ea-a94dfd1af206\" (UID: \"62e1b4fe-576f-48ed-a7ea-a94dfd1af206\") " Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.202497 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-65dg7\" (UniqueName: \"kubernetes.io/projected/62e1b4fe-576f-48ed-a7ea-a94dfd1af206-kube-api-access-65dg7\") pod \"62e1b4fe-576f-48ed-a7ea-a94dfd1af206\" (UID: \"62e1b4fe-576f-48ed-a7ea-a94dfd1af206\") " Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.202554 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62e1b4fe-576f-48ed-a7ea-a94dfd1af206-utilities\") pod \"62e1b4fe-576f-48ed-a7ea-a94dfd1af206\" (UID: \"62e1b4fe-576f-48ed-a7ea-a94dfd1af206\") " Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.203609 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62e1b4fe-576f-48ed-a7ea-a94dfd1af206-utilities" (OuterVolumeSpecName: "utilities") pod "62e1b4fe-576f-48ed-a7ea-a94dfd1af206" (UID: "62e1b4fe-576f-48ed-a7ea-a94dfd1af206"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.210343 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62e1b4fe-576f-48ed-a7ea-a94dfd1af206-kube-api-access-65dg7" (OuterVolumeSpecName: "kube-api-access-65dg7") pod "62e1b4fe-576f-48ed-a7ea-a94dfd1af206" (UID: "62e1b4fe-576f-48ed-a7ea-a94dfd1af206"). InnerVolumeSpecName "kube-api-access-65dg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.304047 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-65dg7\" (UniqueName: \"kubernetes.io/projected/62e1b4fe-576f-48ed-a7ea-a94dfd1af206-kube-api-access-65dg7\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.304321 4948 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62e1b4fe-576f-48ed-a7ea-a94dfd1af206-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.331259 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62e1b4fe-576f-48ed-a7ea-a94dfd1af206-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "62e1b4fe-576f-48ed-a7ea-a94dfd1af206" (UID: "62e1b4fe-576f-48ed-a7ea-a94dfd1af206"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.404993 4948 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62e1b4fe-576f-48ed-a7ea-a94dfd1af206-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.470928 4948 generic.go:334] "Generic (PLEG): container finished" podID="62e1b4fe-576f-48ed-a7ea-a94dfd1af206" containerID="7d8c0503decf0d8c91b32565390e34c1c41852f69617690cfc844c67b2f28156" exitCode=0 Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.471009 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z4wls" event={"ID":"62e1b4fe-576f-48ed-a7ea-a94dfd1af206","Type":"ContainerDied","Data":"7d8c0503decf0d8c91b32565390e34c1c41852f69617690cfc844c67b2f28156"} Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.471152 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-z4wls" event={"ID":"62e1b4fe-576f-48ed-a7ea-a94dfd1af206","Type":"ContainerDied","Data":"3f9c41f4a5ee292c95a74bd69aaab9b5e1c4fb2006d74a456f71e7df7fee6e88"} Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.471192 4948 scope.go:117] "RemoveContainer" containerID="7d8c0503decf0d8c91b32565390e34c1c41852f69617690cfc844c67b2f28156" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.471310 4948 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-z4wls" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.493124 4948 scope.go:117] "RemoveContainer" containerID="b5371644deae6a58c1c591fe54d5ce252574e62d93f79b0492a0795d28c23f51" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.503838 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-z4wls"] Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.511484 4948 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-z4wls"] Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.521105 4948 scope.go:117] "RemoveContainer" containerID="ac7269cf52cf994b5390189192e1eddda1b8fbd17d51db1eb1ee97d360bce348" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.543007 4948 scope.go:117] "RemoveContainer" containerID="7d8c0503decf0d8c91b32565390e34c1c41852f69617690cfc844c67b2f28156" Jan 22 14:22:54 crc kubenswrapper[4948]: E0122 14:22:54.543506 4948 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7d8c0503decf0d8c91b32565390e34c1c41852f69617690cfc844c67b2f28156\": container with ID starting with 7d8c0503decf0d8c91b32565390e34c1c41852f69617690cfc844c67b2f28156 not found: ID does not exist" containerID="7d8c0503decf0d8c91b32565390e34c1c41852f69617690cfc844c67b2f28156" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.543548 4948 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7d8c0503decf0d8c91b32565390e34c1c41852f69617690cfc844c67b2f28156"} err="failed to get container status \"7d8c0503decf0d8c91b32565390e34c1c41852f69617690cfc844c67b2f28156\": rpc error: code = NotFound desc = could not find container \"7d8c0503decf0d8c91b32565390e34c1c41852f69617690cfc844c67b2f28156\": container with ID starting with 7d8c0503decf0d8c91b32565390e34c1c41852f69617690cfc844c67b2f28156 not found: ID does not exist" Jan 22 14:22:54 crc 
kubenswrapper[4948]: I0122 14:22:54.543581 4948 scope.go:117] "RemoveContainer" containerID="b5371644deae6a58c1c591fe54d5ce252574e62d93f79b0492a0795d28c23f51" Jan 22 14:22:54 crc kubenswrapper[4948]: E0122 14:22:54.544393 4948 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b5371644deae6a58c1c591fe54d5ce252574e62d93f79b0492a0795d28c23f51\": container with ID starting with b5371644deae6a58c1c591fe54d5ce252574e62d93f79b0492a0795d28c23f51 not found: ID does not exist" containerID="b5371644deae6a58c1c591fe54d5ce252574e62d93f79b0492a0795d28c23f51" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.544474 4948 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b5371644deae6a58c1c591fe54d5ce252574e62d93f79b0492a0795d28c23f51"} err="failed to get container status \"b5371644deae6a58c1c591fe54d5ce252574e62d93f79b0492a0795d28c23f51\": rpc error: code = NotFound desc = could not find container \"b5371644deae6a58c1c591fe54d5ce252574e62d93f79b0492a0795d28c23f51\": container with ID starting with b5371644deae6a58c1c591fe54d5ce252574e62d93f79b0492a0795d28c23f51 not found: ID does not exist" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.544536 4948 scope.go:117] "RemoveContainer" containerID="ac7269cf52cf994b5390189192e1eddda1b8fbd17d51db1eb1ee97d360bce348" Jan 22 14:22:54 crc kubenswrapper[4948]: E0122 14:22:54.544965 4948 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ac7269cf52cf994b5390189192e1eddda1b8fbd17d51db1eb1ee97d360bce348\": container with ID starting with ac7269cf52cf994b5390189192e1eddda1b8fbd17d51db1eb1ee97d360bce348 not found: ID does not exist" containerID="ac7269cf52cf994b5390189192e1eddda1b8fbd17d51db1eb1ee97d360bce348" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.544997 4948 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ac7269cf52cf994b5390189192e1eddda1b8fbd17d51db1eb1ee97d360bce348"} err="failed to get container status \"ac7269cf52cf994b5390189192e1eddda1b8fbd17d51db1eb1ee97d360bce348\": rpc error: code = NotFound desc = could not find container \"ac7269cf52cf994b5390189192e1eddda1b8fbd17d51db1eb1ee97d360bce348\": container with ID starting with ac7269cf52cf994b5390189192e1eddda1b8fbd17d51db1eb1ee97d360bce348 not found: ID does not exist" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.782276 4948 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 22 14:22:54 crc kubenswrapper[4948]: E0122 14:22:54.782688 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62e1b4fe-576f-48ed-a7ea-a94dfd1af206" containerName="extract-content" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.782714 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="62e1b4fe-576f-48ed-a7ea-a94dfd1af206" containerName="extract-content" Jan 22 14:22:54 crc kubenswrapper[4948]: E0122 14:22:54.782733 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62e1b4fe-576f-48ed-a7ea-a94dfd1af206" containerName="extract-utilities" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.782747 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="62e1b4fe-576f-48ed-a7ea-a94dfd1af206" containerName="extract-utilities" Jan 22 14:22:54 crc kubenswrapper[4948]: E0122 14:22:54.782781 4948 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="62e1b4fe-576f-48ed-a7ea-a94dfd1af206" containerName="registry-server" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.782794 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="62e1b4fe-576f-48ed-a7ea-a94dfd1af206" containerName="registry-server" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.783007 4948 memory_manager.go:354] "RemoveStaleState removing state" podUID="62e1b4fe-576f-48ed-a7ea-a94dfd1af206" containerName="registry-server" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.783651 4948 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.784131 4948 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db" gracePeriod=15 Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.784261 4948 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc" gracePeriod=15 Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.784341 4948 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e" gracePeriod=15 Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.784273 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.784441 4948 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa" gracePeriod=15 Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.785460 4948 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f" gracePeriod=15 Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.786511 4948 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 22 14:22:54 crc kubenswrapper[4948]: E0122 14:22:54.786936 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.786979 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 22 14:22:54 crc kubenswrapper[4948]: E0122 14:22:54.787006 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.787059 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 22 14:22:54 crc kubenswrapper[4948]: E0122 14:22:54.787077 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.787092 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 22 14:22:54 crc kubenswrapper[4948]: E0122 14:22:54.787112 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.787128 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 22 14:22:54 crc kubenswrapper[4948]: E0122 14:22:54.787148 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.787164 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Jan 22 14:22:54 crc kubenswrapper[4948]: E0122 14:22:54.787197 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.787216 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 22 14:22:54 crc kubenswrapper[4948]: E0122 14:22:54.787235 4948 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.787250 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.787506 4948 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.787557 4948 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.787579 4948 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.787612 4948 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.787635 4948 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.787657 4948 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.813586 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.813661 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.813686 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.813716 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.813746 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" 
(UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.813768 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.813873 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.813936 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:22:54 crc kubenswrapper[4948]: E0122 14:22:54.829850 4948 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.22:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.914585 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.914719 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.914759 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.914775 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.914836 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") 
" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.914883 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.914911 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.914953 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.914975 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.915006 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.915088 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.915129 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.915156 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.915221 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.915276 4948 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:22:54 crc kubenswrapper[4948]: I0122 14:22:54.915230 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:22:55 crc kubenswrapper[4948]: I0122 14:22:55.131118 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:22:55 crc kubenswrapper[4948]: W0122 14:22:55.154893 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-469e58b73ad52d6a9f329598f87df98f6f8b59f45b06ca6e7f4435e583f71f04 WatchSource:0}: Error finding container 469e58b73ad52d6a9f329598f87df98f6f8b59f45b06ca6e7f4435e583f71f04: Status 404 returned error can't find the container with id 469e58b73ad52d6a9f329598f87df98f6f8b59f45b06ca6e7f4435e583f71f04 Jan 22 14:22:55 crc kubenswrapper[4948]: E0122 14:22:55.158416 4948 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.22:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188d1398b1548cbb openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-22 14:22:55.157537979 +0000 UTC m=+233.487210918,LastTimestamp:2026-01-22 14:22:55.157537979 +0000 UTC m=+233.487210918,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 22 14:22:55 crc kubenswrapper[4948]: I0122 14:22:55.479362 4948 generic.go:334] "Generic (PLEG): container finished" podID="4a6ff136-0872-4b4b-9104-d7c66e20d6f3" containerID="52c4281899dbbf57a17ff12142db6d062f7e1def8d22791022c5d7c24e4bf4ff" exitCode=0 Jan 22 14:22:55 crc kubenswrapper[4948]: I0122 14:22:55.479457 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"4a6ff136-0872-4b4b-9104-d7c66e20d6f3","Type":"ContainerDied","Data":"52c4281899dbbf57a17ff12142db6d062f7e1def8d22791022c5d7c24e4bf4ff"} Jan 22 14:22:55 crc kubenswrapper[4948]: I0122 14:22:55.480566 4948 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:22:55 crc kubenswrapper[4948]: I0122 14:22:55.480929 4948 status_manager.go:851] "Failed to get status for pod" podUID="4a6ff136-0872-4b4b-9104-d7c66e20d6f3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:22:55 crc kubenswrapper[4948]: I0122 14:22:55.482205 4948 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Jan 22 14:22:55 crc kubenswrapper[4948]: I0122 14:22:55.483222 4948 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 22 14:22:55 crc kubenswrapper[4948]: I0122 14:22:55.483958 4948 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc" exitCode=0 Jan 22 14:22:55 crc kubenswrapper[4948]: I0122 14:22:55.483976 4948 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa" exitCode=0 Jan 22 14:22:55 crc kubenswrapper[4948]: I0122 14:22:55.483985 4948 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e" exitCode=0 Jan 22 14:22:55 crc kubenswrapper[4948]: I0122 14:22:55.483995 4948 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f" exitCode=2 Jan 22 14:22:55 crc kubenswrapper[4948]: I0122 14:22:55.484112 4948 scope.go:117] "RemoveContainer" containerID="88bfb776b014c3b9eaaea039851bd7774749db99f934b6dcc65b4eeb3d90a10d" Jan 22 14:22:55 crc kubenswrapper[4948]: I0122 14:22:55.485947 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"15b0b7d15c4547fc4f1e87e8dd54c6ba9acb8ca2e424a50f940e7794d5edbe55"} Jan 22 14:22:55 crc kubenswrapper[4948]: I0122 14:22:55.485987 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"469e58b73ad52d6a9f329598f87df98f6f8b59f45b06ca6e7f4435e583f71f04"} Jan 22 14:22:55 crc kubenswrapper[4948]: I0122 14:22:55.486569 4948 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:22:55 crc kubenswrapper[4948]: E0122 14:22:55.486663 4948 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.22:6443: connect: connection refused" 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:22:55 crc kubenswrapper[4948]: I0122 14:22:55.486834 4948 status_manager.go:851] "Failed to get status for pod" podUID="4a6ff136-0872-4b4b-9104-d7c66e20d6f3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:22:55 crc kubenswrapper[4948]: E0122 14:22:55.491454 4948 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.22:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188d1398b1548cbb openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-22 14:22:55.157537979 +0000 UTC m=+233.487210918,LastTimestamp:2026-01-22 14:22:55.157537979 +0000 UTC m=+233.487210918,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 22 14:22:55 crc kubenswrapper[4948]: E0122 14:22:55.504167 4948 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:22:55 crc kubenswrapper[4948]: E0122 14:22:55.504857 4948 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:22:55 crc kubenswrapper[4948]: E0122 14:22:55.505317 4948 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:22:55 crc kubenswrapper[4948]: E0122 14:22:55.505961 4948 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:22:55 crc kubenswrapper[4948]: E0122 14:22:55.506410 4948 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:22:55 crc kubenswrapper[4948]: I0122 14:22:55.506444 4948 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Jan 22 14:22:55 crc kubenswrapper[4948]: E0122 14:22:55.506777 4948 controller.go:145] "Failed to ensure lease exists, will retry" err="Get 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.22:6443: connect: connection refused" interval="200ms" Jan 22 14:22:55 crc kubenswrapper[4948]: E0122 14:22:55.708262 4948 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.22:6443: connect: connection refused" interval="400ms" Jan 22 14:22:55 crc kubenswrapper[4948]: I0122 14:22:55.969934 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62e1b4fe-576f-48ed-a7ea-a94dfd1af206" path="/var/lib/kubelet/pods/62e1b4fe-576f-48ed-a7ea-a94dfd1af206/volumes" Jan 22 14:22:56 crc kubenswrapper[4948]: E0122 14:22:56.109208 4948 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.22:6443: connect: connection refused" interval="800ms" Jan 22 14:22:56 crc kubenswrapper[4948]: I0122 14:22:56.499879 4948 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 22 14:22:56 crc kubenswrapper[4948]: E0122 14:22:56.910450 4948 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.22:6443: connect: connection refused" interval="1.6s" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.013661 4948 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.014860 4948 status_manager.go:851] "Failed to get status for pod" podUID="4a6ff136-0872-4b4b-9104-d7c66e20d6f3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.144496 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4a6ff136-0872-4b4b-9104-d7c66e20d6f3-kube-api-access\") pod \"4a6ff136-0872-4b4b-9104-d7c66e20d6f3\" (UID: \"4a6ff136-0872-4b4b-9104-d7c66e20d6f3\") " Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.144558 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/4a6ff136-0872-4b4b-9104-d7c66e20d6f3-var-lock\") pod \"4a6ff136-0872-4b4b-9104-d7c66e20d6f3\" (UID: \"4a6ff136-0872-4b4b-9104-d7c66e20d6f3\") " Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.144608 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4a6ff136-0872-4b4b-9104-d7c66e20d6f3-kubelet-dir\") pod \"4a6ff136-0872-4b4b-9104-d7c66e20d6f3\" (UID: \"4a6ff136-0872-4b4b-9104-d7c66e20d6f3\") " Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.144683 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4a6ff136-0872-4b4b-9104-d7c66e20d6f3-var-lock" (OuterVolumeSpecName: "var-lock") pod "4a6ff136-0872-4b4b-9104-d7c66e20d6f3" (UID: "4a6ff136-0872-4b4b-9104-d7c66e20d6f3"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.144739 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4a6ff136-0872-4b4b-9104-d7c66e20d6f3-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "4a6ff136-0872-4b4b-9104-d7c66e20d6f3" (UID: "4a6ff136-0872-4b4b-9104-d7c66e20d6f3"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.145170 4948 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/4a6ff136-0872-4b4b-9104-d7c66e20d6f3-var-lock\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.145205 4948 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4a6ff136-0872-4b4b-9104-d7c66e20d6f3-kubelet-dir\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.147836 4948 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.148692 4948 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.149246 4948 status_manager.go:851] "Failed to get status for pod" podUID="4a6ff136-0872-4b4b-9104-d7c66e20d6f3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.149488 4948 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.152073 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a6ff136-0872-4b4b-9104-d7c66e20d6f3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "4a6ff136-0872-4b4b-9104-d7c66e20d6f3" (UID: "4a6ff136-0872-4b4b-9104-d7c66e20d6f3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.246194 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4a6ff136-0872-4b4b-9104-d7c66e20d6f3-kube-api-access\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.347594 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.347758 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.347911 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.347959 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.348069 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.348178 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.348405 4948 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.348431 4948 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.348450 4948 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.510204 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"4a6ff136-0872-4b4b-9104-d7c66e20d6f3","Type":"ContainerDied","Data":"6d6521437881ad98c886f006840dadf8f1018f758cefbb5a8e6cf72ed700f08f"} Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.510253 4948 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6d6521437881ad98c886f006840dadf8f1018f758cefbb5a8e6cf72ed700f08f" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.510255 4948 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.514038 4948 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.514934 4948 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db" exitCode=0 Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.514979 4948 scope.go:117] "RemoveContainer" containerID="cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.515151 4948 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.535984 4948 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.536788 4948 status_manager.go:851] "Failed to get status for pod" podUID="4a6ff136-0872-4b4b-9104-d7c66e20d6f3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.539845 4948 status_manager.go:851] "Failed to get status for pod" podUID="4a6ff136-0872-4b4b-9104-d7c66e20d6f3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.540414 4948 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.541364 4948 scope.go:117] "RemoveContainer" containerID="0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.562599 4948 scope.go:117] "RemoveContainer" containerID="f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.583740 4948 scope.go:117] "RemoveContainer" containerID="c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.607664 4948 scope.go:117] "RemoveContainer" containerID="1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.632168 4948 scope.go:117] "RemoveContainer" containerID="c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.670458 4948 scope.go:117] "RemoveContainer" containerID="cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc" Jan 22 14:22:57 crc kubenswrapper[4948]: E0122 14:22:57.671153 4948 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\": container with ID starting with cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc not found: ID does not exist" containerID="cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.671226 4948 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc"} err="failed to get container status \"cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\": rpc error: code = NotFound desc = could not find container 
\"cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc\": container with ID starting with cdaa04391795352593125d1a5d69381da4e9c3ee9830a6077ac86ed9013f55bc not found: ID does not exist" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.671255 4948 scope.go:117] "RemoveContainer" containerID="0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa" Jan 22 14:22:57 crc kubenswrapper[4948]: E0122 14:22:57.672435 4948 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\": container with ID starting with 0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa not found: ID does not exist" containerID="0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.672493 4948 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa"} err="failed to get container status \"0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\": rpc error: code = NotFound desc = could not find container \"0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa\": container with ID starting with 0ed66900df0696f1a432c36be6c6aaaf659f6548f99530df659ebea62c773bfa not found: ID does not exist" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.672530 4948 scope.go:117] "RemoveContainer" containerID="f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e" Jan 22 14:22:57 crc kubenswrapper[4948]: E0122 14:22:57.673159 4948 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\": container with ID starting with f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e not found: ID does not exist" containerID="f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.673229 4948 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e"} err="failed to get container status \"f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\": rpc error: code = NotFound desc = could not find container \"f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e\": container with ID starting with f17fc6f4d08a86cde0399ff0a6b5ad7b4f7f727f8cc012044f47c7fe74f1205e not found: ID does not exist" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.673268 4948 scope.go:117] "RemoveContainer" containerID="c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f" Jan 22 14:22:57 crc kubenswrapper[4948]: E0122 14:22:57.673599 4948 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\": container with ID starting with c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f not found: ID does not exist" containerID="c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.673643 4948 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f"} 
err="failed to get container status \"c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\": rpc error: code = NotFound desc = could not find container \"c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f\": container with ID starting with c0738b84f6dfc90a75647bcbf7fa2766df3167fbc6736413503b5d47b5399b3f not found: ID does not exist" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.673669 4948 scope.go:117] "RemoveContainer" containerID="1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db" Jan 22 14:22:57 crc kubenswrapper[4948]: E0122 14:22:57.674342 4948 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\": container with ID starting with 1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db not found: ID does not exist" containerID="1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.674400 4948 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db"} err="failed to get container status \"1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\": rpc error: code = NotFound desc = could not find container \"1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db\": container with ID starting with 1550baaec66c53ecf5e20e02bebe27c5aca3a6e8808b02d6906015544300d5db not found: ID does not exist" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.674430 4948 scope.go:117] "RemoveContainer" containerID="c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9" Jan 22 14:22:57 crc kubenswrapper[4948]: E0122 14:22:57.674764 4948 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\": container with ID starting with c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9 not found: ID does not exist" containerID="c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.674805 4948 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9"} err="failed to get container status \"c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\": rpc error: code = NotFound desc = could not find container \"c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9\": container with ID starting with c0cb13911d71cf329a6f0b42f12e70b5c3d5931fed554f5cd1a02f392cdb73e9 not found: ID does not exist" Jan 22 14:22:57 crc kubenswrapper[4948]: I0122 14:22:57.972461 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Jan 22 14:22:58 crc kubenswrapper[4948]: E0122 14:22:58.511604 4948 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.22:6443: connect: connection refused" interval="3.2s" Jan 22 14:22:59 crc kubenswrapper[4948]: E0122 14:22:59.538135 4948 kubelet_node_status.go:585] "Error updating node status, will retry" 
err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:22:59Z\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:22:59Z\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:22:59Z\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-01-22T14:22:59Z\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"}]}}\" for node \"crc\": Patch \"https://api-int.crc.testing:6443/api/v1/nodes/crc/status?timeout=10s\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:22:59 crc kubenswrapper[4948]: E0122 14:22:59.538686 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:22:59 crc kubenswrapper[4948]: E0122 14:22:59.539081 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:22:59 crc kubenswrapper[4948]: E0122 14:22:59.539446 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:22:59 crc kubenswrapper[4948]: E0122 14:22:59.539804 4948 kubelet_node_status.go:585] "Error updating node status, will retry" err="error getting node \"crc\": Get \"https://api-int.crc.testing:6443/api/v1/nodes/crc?timeout=10s\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:22:59 crc kubenswrapper[4948]: E0122 14:22:59.539844 4948 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Jan 22 14:23:01 crc kubenswrapper[4948]: E0122 14:23:01.712558 4948 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.22:6443: connect: connection refused" interval="6.4s" Jan 22 14:23:01 crc kubenswrapper[4948]: I0122 14:23:01.964957 4948 status_manager.go:851] "Failed to get status for pod" podUID="4a6ff136-0872-4b4b-9104-d7c66e20d6f3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.325222 4948 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" podUID="6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6" containerName="oauth-openshift" 
containerID="cri-o://fc24d6e9050886c77a04f9569e590982da2bbbddd6e021cf5a85ef331a7b6fa9" gracePeriod=15 Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.570686 4948 generic.go:334] "Generic (PLEG): container finished" podID="6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6" containerID="fc24d6e9050886c77a04f9569e590982da2bbbddd6e021cf5a85ef331a7b6fa9" exitCode=0 Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.570731 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" event={"ID":"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6","Type":"ContainerDied","Data":"fc24d6e9050886c77a04f9569e590982da2bbbddd6e021cf5a85ef331a7b6fa9"} Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.897141 4948 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.898893 4948 status_manager.go:851] "Failed to get status for pod" podUID="6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6" pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-zq2mw\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.899374 4948 status_manager.go:851] "Failed to get status for pod" podUID="4a6ff136-0872-4b4b-9104-d7c66e20d6f3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.950185 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-user-template-provider-selection\") pod \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.950255 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-user-idp-0-file-data\") pod \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.950283 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-service-ca\") pod \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.950314 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-cliconfig\") pod \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.950349 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-user-template-error\") pod 
\"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.950380 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-session\") pod \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.950406 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-router-certs\") pod \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.950457 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-audit-dir\") pod \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.950484 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-ocp-branding-template\") pod \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.950508 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-user-template-login\") pod \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.950527 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hx5nz\" (UniqueName: \"kubernetes.io/projected/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-kube-api-access-hx5nz\") pod \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.950565 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-serving-cert\") pod \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.950594 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-trusted-ca-bundle\") pod \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.950613 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-audit-policies\") pod \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\" (UID: \"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6\") " Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.951689 
4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6" (UID: "6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.952082 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6" (UID: "6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.952121 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6" (UID: "6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.952159 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6" (UID: "6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.952948 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6" (UID: "6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.961210 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6" (UID: "6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.961510 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-kube-api-access-hx5nz" (OuterVolumeSpecName: "kube-api-access-hx5nz") pod "6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6" (UID: "6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6"). InnerVolumeSpecName "kube-api-access-hx5nz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.961557 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6" (UID: "6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.961775 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6" (UID: "6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.965328 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6" (UID: "6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.965841 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6" (UID: "6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.966393 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6" (UID: "6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.966804 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6" (UID: "6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:23:04 crc kubenswrapper[4948]: I0122 14:23:04.968539 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6" (UID: "6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:23:05 crc kubenswrapper[4948]: I0122 14:23:05.060161 4948 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Jan 22 14:23:05 crc kubenswrapper[4948]: I0122 14:23:05.060205 4948 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Jan 22 14:23:05 crc kubenswrapper[4948]: I0122 14:23:05.060217 4948 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Jan 22 14:23:05 crc kubenswrapper[4948]: I0122 14:23:05.060226 4948 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Jan 22 14:23:05 crc kubenswrapper[4948]: I0122 14:23:05.060279 4948 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-audit-dir\") on node \"crc\" DevicePath \"\"" Jan 22 14:23:05 crc kubenswrapper[4948]: I0122 14:23:05.060402 4948 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Jan 22 14:23:05 crc kubenswrapper[4948]: I0122 14:23:05.060413 4948 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Jan 22 14:23:05 crc kubenswrapper[4948]: I0122 14:23:05.060422 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hx5nz\" (UniqueName: \"kubernetes.io/projected/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-kube-api-access-hx5nz\") on node \"crc\" DevicePath \"\"" Jan 22 14:23:05 crc kubenswrapper[4948]: I0122 14:23:05.060431 4948 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Jan 22 14:23:05 crc kubenswrapper[4948]: I0122 14:23:05.060439 4948 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Jan 22 14:23:05 crc kubenswrapper[4948]: I0122 14:23:05.060453 4948 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-audit-policies\") on node \"crc\" DevicePath \"\"" Jan 22 14:23:05 crc kubenswrapper[4948]: I0122 14:23:05.060464 4948 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: 
\"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Jan 22 14:23:05 crc kubenswrapper[4948]: I0122 14:23:05.060478 4948 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Jan 22 14:23:05 crc kubenswrapper[4948]: I0122 14:23:05.060487 4948 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:23:05 crc kubenswrapper[4948]: E0122 14:23:05.493528 4948 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.22:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.188d1398b1548cbb openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-01-22 14:22:55.157537979 +0000 UTC m=+233.487210918,LastTimestamp:2026-01-22 14:22:55.157537979 +0000 UTC m=+233.487210918,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Jan 22 14:23:05 crc kubenswrapper[4948]: I0122 14:23:05.576631 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" event={"ID":"6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6","Type":"ContainerDied","Data":"48ab9e553ceeb06e5cf1a69c4535072ac97e1e436904cfb949fe585162cd3308"} Jan 22 14:23:05 crc kubenswrapper[4948]: I0122 14:23:05.576681 4948 scope.go:117] "RemoveContainer" containerID="fc24d6e9050886c77a04f9569e590982da2bbbddd6e021cf5a85ef331a7b6fa9" Jan 22 14:23:05 crc kubenswrapper[4948]: I0122 14:23:05.576810 4948 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" Jan 22 14:23:05 crc kubenswrapper[4948]: I0122 14:23:05.577432 4948 status_manager.go:851] "Failed to get status for pod" podUID="6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6" pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-zq2mw\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:23:05 crc kubenswrapper[4948]: I0122 14:23:05.577657 4948 status_manager.go:851] "Failed to get status for pod" podUID="4a6ff136-0872-4b4b-9104-d7c66e20d6f3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:23:05 crc kubenswrapper[4948]: I0122 14:23:05.594720 4948 status_manager.go:851] "Failed to get status for pod" podUID="6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6" pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-zq2mw\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:23:05 crc kubenswrapper[4948]: I0122 14:23:05.595809 4948 status_manager.go:851] "Failed to get status for pod" podUID="4a6ff136-0872-4b4b-9104-d7c66e20d6f3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:23:05 crc kubenswrapper[4948]: I0122 14:23:05.957035 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:23:05 crc kubenswrapper[4948]: I0122 14:23:05.957903 4948 status_manager.go:851] "Failed to get status for pod" podUID="4a6ff136-0872-4b4b-9104-d7c66e20d6f3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:23:05 crc kubenswrapper[4948]: I0122 14:23:05.958397 4948 status_manager.go:851] "Failed to get status for pod" podUID="6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6" pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-zq2mw\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:23:05 crc kubenswrapper[4948]: I0122 14:23:05.974234 4948 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="70cc2753-c3d6-409c-ad8a-9dabbb34462d" Jan 22 14:23:05 crc kubenswrapper[4948]: I0122 14:23:05.974267 4948 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="70cc2753-c3d6-409c-ad8a-9dabbb34462d" Jan 22 14:23:05 crc kubenswrapper[4948]: E0122 14:23:05.974718 4948 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.22:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:23:05 crc kubenswrapper[4948]: I0122 14:23:05.976244 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:23:05 crc kubenswrapper[4948]: W0122 14:23:05.998929 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-a079dd9b67cb085153d687f3d39c87a05b9ec7ac4891aea75aaa74a223aae2fd WatchSource:0}: Error finding container a079dd9b67cb085153d687f3d39c87a05b9ec7ac4891aea75aaa74a223aae2fd: Status 404 returned error can't find the container with id a079dd9b67cb085153d687f3d39c87a05b9ec7ac4891aea75aaa74a223aae2fd Jan 22 14:23:06 crc kubenswrapper[4948]: I0122 14:23:06.582791 4948 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="5579f77ed4de37110f01c1dd39f0df35b1ab88019befc9d7d708420f28ee489d" exitCode=0 Jan 22 14:23:06 crc kubenswrapper[4948]: I0122 14:23:06.582843 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"5579f77ed4de37110f01c1dd39f0df35b1ab88019befc9d7d708420f28ee489d"} Jan 22 14:23:06 crc kubenswrapper[4948]: I0122 14:23:06.583144 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"a079dd9b67cb085153d687f3d39c87a05b9ec7ac4891aea75aaa74a223aae2fd"} Jan 22 14:23:06 crc kubenswrapper[4948]: I0122 14:23:06.583617 4948 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="70cc2753-c3d6-409c-ad8a-9dabbb34462d" Jan 22 14:23:06 crc kubenswrapper[4948]: I0122 14:23:06.583647 4948 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="70cc2753-c3d6-409c-ad8a-9dabbb34462d" Jan 22 14:23:06 crc kubenswrapper[4948]: I0122 14:23:06.584232 4948 status_manager.go:851] "Failed to get status for pod" podUID="4a6ff136-0872-4b4b-9104-d7c66e20d6f3" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:23:06 crc kubenswrapper[4948]: E0122 14:23:06.584476 4948 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.22:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:23:06 crc kubenswrapper[4948]: I0122 14:23:06.584487 4948 status_manager.go:851] "Failed to get status for pod" podUID="6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6" pod="openshift-authentication/oauth-openshift-558db77b4-zq2mw" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-558db77b4-zq2mw\": dial tcp 38.102.83.22:6443: connect: connection refused" Jan 22 14:23:07 crc kubenswrapper[4948]: I0122 14:23:07.592656 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"ec3f57acf9e716e9ba07442e0b396dee0421843315187d2ea0dfb1512a108d28"} Jan 22 14:23:07 crc kubenswrapper[4948]: I0122 14:23:07.592963 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"bb51595da87ecab96a78e9f399677b02714b37f163eea8693e0c6f60a5bf04a0"} Jan 22 14:23:07 crc kubenswrapper[4948]: I0122 14:23:07.592978 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"56babc0f308ba88a9f0436be2962626fe6df79b909c2d1b003815dcbfae3f9a3"} Jan 22 14:23:07 crc kubenswrapper[4948]: I0122 14:23:07.592988 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"ee82972c93d9e0308b5bba53b9082b61650db1782213524cecdfa31862d31055"} Jan 22 14:23:08 crc kubenswrapper[4948]: I0122 14:23:08.601697 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"367911f9d326c42fc99ceb46585ead70bd0600200b31843c192d4b36f5bf79c3"} Jan 22 14:23:08 crc kubenswrapper[4948]: I0122 14:23:08.601967 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:23:08 crc kubenswrapper[4948]: I0122 14:23:08.601961 4948 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="70cc2753-c3d6-409c-ad8a-9dabbb34462d" Jan 22 14:23:08 crc kubenswrapper[4948]: I0122 14:23:08.601994 4948 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="70cc2753-c3d6-409c-ad8a-9dabbb34462d" Jan 22 14:23:09 crc kubenswrapper[4948]: I0122 14:23:09.610192 4948 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 22 14:23:09 crc kubenswrapper[4948]: I0122 14:23:09.610245 4948 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0" exitCode=1 Jan 22 14:23:09 crc kubenswrapper[4948]: I0122 14:23:09.610277 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0"} Jan 22 14:23:09 crc kubenswrapper[4948]: I0122 14:23:09.610699 4948 scope.go:117] "RemoveContainer" containerID="52f7a4d3f394657276ce00d96ebf9e1c33801cabfdeb17f7f9f56b701b9ef3a0" Jan 22 14:23:10 crc kubenswrapper[4948]: I0122 14:23:10.622605 4948 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Jan 22 14:23:10 crc kubenswrapper[4948]: I0122 14:23:10.623167 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"a94fca2b0ae56b10b6d97f81766e56b7da786d1eaf3a9676adc01bcdaacdb718"} Jan 22 14:23:10 crc kubenswrapper[4948]: I0122 14:23:10.976806 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:23:10 crc kubenswrapper[4948]: I0122 14:23:10.976874 4948 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:23:10 crc kubenswrapper[4948]: I0122 14:23:10.985435 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:23:12 crc kubenswrapper[4948]: I0122 14:23:12.961058 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 14:23:12 crc kubenswrapper[4948]: I0122 14:23:12.968111 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 14:23:13 crc kubenswrapper[4948]: I0122 14:23:13.616712 4948 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:23:13 crc kubenswrapper[4948]: I0122 14:23:13.641321 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 14:23:13 crc kubenswrapper[4948]: I0122 14:23:13.701495 4948 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="8efa724f-107d-4aa3-8e37-487ab9909b8f" Jan 22 14:23:14 crc kubenswrapper[4948]: I0122 14:23:14.651418 4948 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="70cc2753-c3d6-409c-ad8a-9dabbb34462d" Jan 22 14:23:14 crc kubenswrapper[4948]: I0122 14:23:14.651474 4948 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="70cc2753-c3d6-409c-ad8a-9dabbb34462d" Jan 22 14:23:14 crc kubenswrapper[4948]: I0122 14:23:14.656346 4948 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="8efa724f-107d-4aa3-8e37-487ab9909b8f" Jan 22 14:23:14 crc kubenswrapper[4948]: I0122 14:23:14.661769 4948 status_manager.go:308] "Container readiness changed before pod has synced" pod="openshift-kube-apiserver/kube-apiserver-crc" containerID="cri-o://ee82972c93d9e0308b5bba53b9082b61650db1782213524cecdfa31862d31055" Jan 22 14:23:14 crc kubenswrapper[4948]: I0122 14:23:14.662062 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:23:15 crc kubenswrapper[4948]: I0122 14:23:15.658978 4948 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="70cc2753-c3d6-409c-ad8a-9dabbb34462d" Jan 22 14:23:15 crc kubenswrapper[4948]: I0122 14:23:15.659455 4948 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="70cc2753-c3d6-409c-ad8a-9dabbb34462d" Jan 22 14:23:15 crc kubenswrapper[4948]: I0122 14:23:15.663224 4948 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="8efa724f-107d-4aa3-8e37-487ab9909b8f" Jan 22 14:23:20 crc kubenswrapper[4948]: I0122 14:23:20.068863 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Jan 22 14:23:20 crc kubenswrapper[4948]: I0122 14:23:20.780179 4948 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Jan 22 14:23:21 crc kubenswrapper[4948]: I0122 14:23:21.151828 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Jan 22 14:23:21 crc kubenswrapper[4948]: I0122 14:23:21.239327 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Jan 22 14:23:22 crc kubenswrapper[4948]: I0122 14:23:22.242083 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Jan 22 14:23:22 crc kubenswrapper[4948]: I0122 14:23:22.257037 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Jan 22 14:23:23 crc kubenswrapper[4948]: I0122 14:23:23.102696 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Jan 22 14:23:24 crc kubenswrapper[4948]: I0122 14:23:24.081863 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Jan 22 14:23:25 crc kubenswrapper[4948]: I0122 14:23:25.296388 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Jan 22 14:23:25 crc kubenswrapper[4948]: I0122 14:23:25.439185 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Jan 22 14:23:25 crc kubenswrapper[4948]: I0122 14:23:25.677261 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Jan 22 14:23:25 crc kubenswrapper[4948]: I0122 14:23:25.696486 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Jan 22 14:23:26 crc kubenswrapper[4948]: I0122 14:23:26.140451 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Jan 22 14:23:26 crc kubenswrapper[4948]: I0122 14:23:26.176170 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Jan 22 14:23:26 crc kubenswrapper[4948]: I0122 14:23:26.195977 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Jan 22 14:23:26 crc kubenswrapper[4948]: I0122 14:23:26.554812 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Jan 22 14:23:26 crc kubenswrapper[4948]: I0122 14:23:26.595298 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Jan 22 14:23:26 crc kubenswrapper[4948]: I0122 14:23:26.771541 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Jan 22 14:23:27 crc kubenswrapper[4948]: I0122 14:23:27.319889 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Jan 22 14:23:27 crc kubenswrapper[4948]: I0122 14:23:27.418310 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Jan 22 
14:23:27 crc kubenswrapper[4948]: I0122 14:23:27.466473 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Jan 22 14:23:27 crc kubenswrapper[4948]: I0122 14:23:27.536941 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Jan 22 14:23:27 crc kubenswrapper[4948]: I0122 14:23:27.758046 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Jan 22 14:23:27 crc kubenswrapper[4948]: I0122 14:23:27.787936 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Jan 22 14:23:27 crc kubenswrapper[4948]: I0122 14:23:27.960166 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Jan 22 14:23:27 crc kubenswrapper[4948]: I0122 14:23:27.973976 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Jan 22 14:23:27 crc kubenswrapper[4948]: I0122 14:23:27.997217 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Jan 22 14:23:28 crc kubenswrapper[4948]: I0122 14:23:28.180477 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Jan 22 14:23:28 crc kubenswrapper[4948]: I0122 14:23:28.453282 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Jan 22 14:23:28 crc kubenswrapper[4948]: I0122 14:23:28.495819 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Jan 22 14:23:28 crc kubenswrapper[4948]: I0122 14:23:28.796252 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Jan 22 14:23:28 crc kubenswrapper[4948]: I0122 14:23:28.847493 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Jan 22 14:23:28 crc kubenswrapper[4948]: I0122 14:23:28.912899 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Jan 22 14:23:28 crc kubenswrapper[4948]: I0122 14:23:28.935665 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Jan 22 14:23:28 crc kubenswrapper[4948]: I0122 14:23:28.984960 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Jan 22 14:23:29 crc kubenswrapper[4948]: I0122 14:23:29.157083 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Jan 22 14:23:29 crc kubenswrapper[4948]: I0122 14:23:29.173605 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Jan 22 14:23:29 crc kubenswrapper[4948]: I0122 14:23:29.197921 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Jan 22 14:23:29 crc kubenswrapper[4948]: I0122 14:23:29.220323 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Jan 22 14:23:29 crc kubenswrapper[4948]: I0122 14:23:29.283915 4948 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Jan 22 14:23:29 crc kubenswrapper[4948]: I0122 14:23:29.365254 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Jan 22 14:23:29 crc kubenswrapper[4948]: I0122 14:23:29.409189 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Jan 22 14:23:29 crc kubenswrapper[4948]: I0122 14:23:29.428296 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Jan 22 14:23:29 crc kubenswrapper[4948]: I0122 14:23:29.477568 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Jan 22 14:23:29 crc kubenswrapper[4948]: I0122 14:23:29.525244 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Jan 22 14:23:29 crc kubenswrapper[4948]: I0122 14:23:29.527141 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Jan 22 14:23:29 crc kubenswrapper[4948]: I0122 14:23:29.540911 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Jan 22 14:23:29 crc kubenswrapper[4948]: I0122 14:23:29.606780 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Jan 22 14:23:29 crc kubenswrapper[4948]: I0122 14:23:29.610339 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Jan 22 14:23:29 crc kubenswrapper[4948]: I0122 14:23:29.697210 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Jan 22 14:23:29 crc kubenswrapper[4948]: I0122 14:23:29.708943 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Jan 22 14:23:29 crc kubenswrapper[4948]: I0122 14:23:29.905043 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Jan 22 14:23:29 crc kubenswrapper[4948]: I0122 14:23:29.936750 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Jan 22 14:23:30 crc kubenswrapper[4948]: I0122 14:23:30.067318 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Jan 22 14:23:30 crc kubenswrapper[4948]: I0122 14:23:30.081944 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Jan 22 14:23:30 crc kubenswrapper[4948]: I0122 14:23:30.094368 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Jan 22 14:23:30 crc kubenswrapper[4948]: I0122 14:23:30.155703 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Jan 22 14:23:30 crc kubenswrapper[4948]: I0122 14:23:30.228656 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Jan 22 14:23:30 crc kubenswrapper[4948]: 
I0122 14:23:30.278826 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Jan 22 14:23:30 crc kubenswrapper[4948]: I0122 14:23:30.284963 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Jan 22 14:23:30 crc kubenswrapper[4948]: I0122 14:23:30.285002 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Jan 22 14:23:30 crc kubenswrapper[4948]: I0122 14:23:30.323488 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Jan 22 14:23:30 crc kubenswrapper[4948]: I0122 14:23:30.340173 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Jan 22 14:23:30 crc kubenswrapper[4948]: I0122 14:23:30.470181 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Jan 22 14:23:30 crc kubenswrapper[4948]: I0122 14:23:30.690810 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Jan 22 14:23:30 crc kubenswrapper[4948]: I0122 14:23:30.791664 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Jan 22 14:23:30 crc kubenswrapper[4948]: I0122 14:23:30.866276 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Jan 22 14:23:30 crc kubenswrapper[4948]: I0122 14:23:30.881908 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Jan 22 14:23:30 crc kubenswrapper[4948]: I0122 14:23:30.996726 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Jan 22 14:23:31 crc kubenswrapper[4948]: I0122 14:23:31.166369 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Jan 22 14:23:31 crc kubenswrapper[4948]: I0122 14:23:31.172513 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Jan 22 14:23:31 crc kubenswrapper[4948]: I0122 14:23:31.183550 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Jan 22 14:23:31 crc kubenswrapper[4948]: I0122 14:23:31.190735 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Jan 22 14:23:31 crc kubenswrapper[4948]: I0122 14:23:31.325609 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Jan 22 14:23:31 crc kubenswrapper[4948]: I0122 14:23:31.367432 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Jan 22 14:23:31 crc kubenswrapper[4948]: I0122 14:23:31.424195 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Jan 22 14:23:31 crc kubenswrapper[4948]: I0122 14:23:31.489543 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Jan 22 14:23:31 crc 
kubenswrapper[4948]: I0122 14:23:31.612577 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Jan 22 14:23:31 crc kubenswrapper[4948]: I0122 14:23:31.730327 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Jan 22 14:23:31 crc kubenswrapper[4948]: I0122 14:23:31.758215 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Jan 22 14:23:31 crc kubenswrapper[4948]: I0122 14:23:31.763820 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Jan 22 14:23:31 crc kubenswrapper[4948]: I0122 14:23:31.825214 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Jan 22 14:23:31 crc kubenswrapper[4948]: I0122 14:23:31.886902 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Jan 22 14:23:31 crc kubenswrapper[4948]: I0122 14:23:31.918647 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Jan 22 14:23:32 crc kubenswrapper[4948]: I0122 14:23:32.040299 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Jan 22 14:23:32 crc kubenswrapper[4948]: I0122 14:23:32.203418 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Jan 22 14:23:32 crc kubenswrapper[4948]: I0122 14:23:32.265121 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Jan 22 14:23:32 crc kubenswrapper[4948]: I0122 14:23:32.351847 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Jan 22 14:23:32 crc kubenswrapper[4948]: I0122 14:23:32.415305 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Jan 22 14:23:32 crc kubenswrapper[4948]: I0122 14:23:32.423608 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Jan 22 14:23:32 crc kubenswrapper[4948]: I0122 14:23:32.446731 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Jan 22 14:23:32 crc kubenswrapper[4948]: I0122 14:23:32.477161 4948 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Jan 22 14:23:32 crc kubenswrapper[4948]: I0122 14:23:32.509042 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Jan 22 14:23:32 crc kubenswrapper[4948]: I0122 14:23:32.546875 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Jan 22 14:23:32 crc kubenswrapper[4948]: I0122 14:23:32.593961 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Jan 22 14:23:32 crc kubenswrapper[4948]: I0122 14:23:32.598288 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Jan 22 14:23:32 crc kubenswrapper[4948]: I0122 
14:23:32.657976 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Jan 22 14:23:32 crc kubenswrapper[4948]: I0122 14:23:32.688178 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Jan 22 14:23:32 crc kubenswrapper[4948]: I0122 14:23:32.750619 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Jan 22 14:23:32 crc kubenswrapper[4948]: I0122 14:23:32.953822 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Jan 22 14:23:33 crc kubenswrapper[4948]: I0122 14:23:33.016631 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Jan 22 14:23:33 crc kubenswrapper[4948]: I0122 14:23:33.017684 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Jan 22 14:23:33 crc kubenswrapper[4948]: I0122 14:23:33.020497 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Jan 22 14:23:33 crc kubenswrapper[4948]: I0122 14:23:33.033047 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Jan 22 14:23:33 crc kubenswrapper[4948]: I0122 14:23:33.093315 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Jan 22 14:23:33 crc kubenswrapper[4948]: I0122 14:23:33.143866 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Jan 22 14:23:33 crc kubenswrapper[4948]: I0122 14:23:33.165436 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Jan 22 14:23:33 crc kubenswrapper[4948]: I0122 14:23:33.289120 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Jan 22 14:23:33 crc kubenswrapper[4948]: I0122 14:23:33.390312 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Jan 22 14:23:33 crc kubenswrapper[4948]: I0122 14:23:33.393797 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Jan 22 14:23:33 crc kubenswrapper[4948]: I0122 14:23:33.394637 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Jan 22 14:23:33 crc kubenswrapper[4948]: I0122 14:23:33.402784 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Jan 22 14:23:33 crc kubenswrapper[4948]: I0122 14:23:33.518647 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Jan 22 14:23:33 crc kubenswrapper[4948]: I0122 14:23:33.547513 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Jan 22 14:23:33 crc kubenswrapper[4948]: I0122 14:23:33.589288 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Jan 
22 14:23:33 crc kubenswrapper[4948]: I0122 14:23:33.590261 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Jan 22 14:23:33 crc kubenswrapper[4948]: I0122 14:23:33.640681 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Jan 22 14:23:33 crc kubenswrapper[4948]: I0122 14:23:33.740360 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Jan 22 14:23:33 crc kubenswrapper[4948]: I0122 14:23:33.741621 4948 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Jan 22 14:23:33 crc kubenswrapper[4948]: I0122 14:23:33.811059 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Jan 22 14:23:33 crc kubenswrapper[4948]: I0122 14:23:33.847693 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Jan 22 14:23:33 crc kubenswrapper[4948]: I0122 14:23:33.854691 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Jan 22 14:23:33 crc kubenswrapper[4948]: I0122 14:23:33.870517 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Jan 22 14:23:33 crc kubenswrapper[4948]: I0122 14:23:33.881144 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Jan 22 14:23:33 crc kubenswrapper[4948]: I0122 14:23:33.965081 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Jan 22 14:23:34 crc kubenswrapper[4948]: I0122 14:23:34.012346 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Jan 22 14:23:34 crc kubenswrapper[4948]: I0122 14:23:34.067632 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Jan 22 14:23:34 crc kubenswrapper[4948]: I0122 14:23:34.158780 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Jan 22 14:23:34 crc kubenswrapper[4948]: I0122 14:23:34.203963 4948 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Jan 22 14:23:34 crc kubenswrapper[4948]: I0122 14:23:34.208714 4948 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-zq2mw","openshift-kube-apiserver/kube-apiserver-crc"] Jan 22 14:23:34 crc kubenswrapper[4948]: I0122 14:23:34.208772 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Jan 22 14:23:34 crc kubenswrapper[4948]: I0122 14:23:34.213063 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Jan 22 14:23:34 crc kubenswrapper[4948]: I0122 14:23:34.228118 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=21.22808818 podStartE2EDuration="21.22808818s" podCreationTimestamp="2026-01-22 14:23:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:23:34.224396199 +0000 UTC m=+272.554069128" watchObservedRunningTime="2026-01-22 14:23:34.22808818 +0000 UTC m=+272.557761149" Jan 22 14:23:34 crc kubenswrapper[4948]: I0122 14:23:34.289232 4948 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Jan 22 14:23:34 crc kubenswrapper[4948]: I0122 14:23:34.312166 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Jan 22 14:23:34 crc kubenswrapper[4948]: I0122 14:23:34.380742 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Jan 22 14:23:34 crc kubenswrapper[4948]: I0122 14:23:34.509766 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Jan 22 14:23:34 crc kubenswrapper[4948]: I0122 14:23:34.525079 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Jan 22 14:23:34 crc kubenswrapper[4948]: I0122 14:23:34.585444 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Jan 22 14:23:34 crc kubenswrapper[4948]: I0122 14:23:34.662613 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Jan 22 14:23:34 crc kubenswrapper[4948]: I0122 14:23:34.708591 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Jan 22 14:23:34 crc kubenswrapper[4948]: I0122 14:23:34.710180 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Jan 22 14:23:34 crc kubenswrapper[4948]: I0122 14:23:34.754642 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Jan 22 14:23:34 crc kubenswrapper[4948]: I0122 14:23:34.775492 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Jan 22 14:23:35 crc kubenswrapper[4948]: I0122 14:23:35.036240 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Jan 22 14:23:35 crc kubenswrapper[4948]: I0122 14:23:35.129482 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Jan 22 14:23:35 crc kubenswrapper[4948]: I0122 14:23:35.141592 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Jan 22 14:23:35 crc kubenswrapper[4948]: I0122 14:23:35.224716 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Jan 22 14:23:35 crc kubenswrapper[4948]: I0122 14:23:35.331311 4948 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Jan 22 14:23:35 crc kubenswrapper[4948]: I0122 14:23:35.333057 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Jan 22 14:23:35 crc kubenswrapper[4948]: I0122 14:23:35.473538 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Jan 22 
14:23:35 crc kubenswrapper[4948]: I0122 14:23:35.473726 4948 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Jan 22 14:23:35 crc kubenswrapper[4948]: I0122 14:23:35.475453 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Jan 22 14:23:35 crc kubenswrapper[4948]: I0122 14:23:35.475623 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Jan 22 14:23:35 crc kubenswrapper[4948]: I0122 14:23:35.490125 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Jan 22 14:23:35 crc kubenswrapper[4948]: I0122 14:23:35.673500 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Jan 22 14:23:35 crc kubenswrapper[4948]: I0122 14:23:35.713287 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 22 14:23:35 crc kubenswrapper[4948]: I0122 14:23:35.748705 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Jan 22 14:23:35 crc kubenswrapper[4948]: I0122 14:23:35.820902 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Jan 22 14:23:35 crc kubenswrapper[4948]: I0122 14:23:35.919404 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Jan 22 14:23:35 crc kubenswrapper[4948]: I0122 14:23:35.950352 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Jan 22 14:23:35 crc kubenswrapper[4948]: I0122 14:23:35.964252 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6" path="/var/lib/kubelet/pods/6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6/volumes" Jan 22 14:23:36 crc kubenswrapper[4948]: I0122 14:23:36.002915 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Jan 22 14:23:36 crc kubenswrapper[4948]: I0122 14:23:36.020420 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Jan 22 14:23:36 crc kubenswrapper[4948]: I0122 14:23:36.132195 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Jan 22 14:23:36 crc kubenswrapper[4948]: I0122 14:23:36.151898 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Jan 22 14:23:36 crc kubenswrapper[4948]: I0122 14:23:36.208447 4948 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Jan 22 14:23:36 crc kubenswrapper[4948]: I0122 14:23:36.208695 4948 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://15b0b7d15c4547fc4f1e87e8dd54c6ba9acb8ca2e424a50f940e7794d5edbe55" gracePeriod=5 Jan 22 14:23:36 crc kubenswrapper[4948]: I0122 14:23:36.247295 4948 reflector.go:368] Caches populated for 
*v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Jan 22 14:23:36 crc kubenswrapper[4948]: I0122 14:23:36.279617 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Jan 22 14:23:36 crc kubenswrapper[4948]: I0122 14:23:36.295924 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Jan 22 14:23:36 crc kubenswrapper[4948]: I0122 14:23:36.358838 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Jan 22 14:23:36 crc kubenswrapper[4948]: I0122 14:23:36.390547 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Jan 22 14:23:36 crc kubenswrapper[4948]: I0122 14:23:36.484521 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 22 14:23:36 crc kubenswrapper[4948]: I0122 14:23:36.569103 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Jan 22 14:23:36 crc kubenswrapper[4948]: I0122 14:23:36.617569 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Jan 22 14:23:36 crc kubenswrapper[4948]: I0122 14:23:36.620865 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Jan 22 14:23:36 crc kubenswrapper[4948]: I0122 14:23:36.666896 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Jan 22 14:23:36 crc kubenswrapper[4948]: I0122 14:23:36.671906 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Jan 22 14:23:36 crc kubenswrapper[4948]: I0122 14:23:36.701236 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Jan 22 14:23:36 crc kubenswrapper[4948]: I0122 14:23:36.715656 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Jan 22 14:23:36 crc kubenswrapper[4948]: I0122 14:23:36.834706 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Jan 22 14:23:36 crc kubenswrapper[4948]: I0122 14:23:36.872332 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Jan 22 14:23:36 crc kubenswrapper[4948]: I0122 14:23:36.941687 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Jan 22 14:23:37 crc kubenswrapper[4948]: I0122 14:23:37.001631 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Jan 22 14:23:37 crc kubenswrapper[4948]: I0122 14:23:37.100535 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Jan 22 14:23:37 crc kubenswrapper[4948]: I0122 14:23:37.114341 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Jan 22 14:23:37 crc kubenswrapper[4948]: I0122 14:23:37.278647 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Jan 22 
14:23:37 crc kubenswrapper[4948]: I0122 14:23:37.282900 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Jan 22 14:23:37 crc kubenswrapper[4948]: I0122 14:23:37.385255 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Jan 22 14:23:37 crc kubenswrapper[4948]: I0122 14:23:37.521043 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Jan 22 14:23:37 crc kubenswrapper[4948]: I0122 14:23:37.563891 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Jan 22 14:23:37 crc kubenswrapper[4948]: I0122 14:23:37.664212 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Jan 22 14:23:37 crc kubenswrapper[4948]: I0122 14:23:37.815169 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Jan 22 14:23:37 crc kubenswrapper[4948]: I0122 14:23:37.832538 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Jan 22 14:23:37 crc kubenswrapper[4948]: I0122 14:23:37.863762 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Jan 22 14:23:37 crc kubenswrapper[4948]: I0122 14:23:37.993236 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Jan 22 14:23:38 crc kubenswrapper[4948]: I0122 14:23:38.055880 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Jan 22 14:23:38 crc kubenswrapper[4948]: I0122 14:23:38.083303 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Jan 22 14:23:38 crc kubenswrapper[4948]: I0122 14:23:38.115056 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Jan 22 14:23:38 crc kubenswrapper[4948]: I0122 14:23:38.115670 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Jan 22 14:23:38 crc kubenswrapper[4948]: I0122 14:23:38.127972 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Jan 22 14:23:38 crc kubenswrapper[4948]: I0122 14:23:38.376769 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Jan 22 14:23:38 crc kubenswrapper[4948]: I0122 14:23:38.396325 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Jan 22 14:23:38 crc kubenswrapper[4948]: I0122 14:23:38.554643 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Jan 22 14:23:38 crc kubenswrapper[4948]: I0122 14:23:38.600414 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 22 14:23:38 crc kubenswrapper[4948]: I0122 14:23:38.694433 4948 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-machine-api"/"machine-api-operator-images" Jan 22 14:23:38 crc kubenswrapper[4948]: I0122 14:23:38.859222 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Jan 22 14:23:38 crc kubenswrapper[4948]: I0122 14:23:38.899802 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Jan 22 14:23:38 crc kubenswrapper[4948]: I0122 14:23:38.924531 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Jan 22 14:23:38 crc kubenswrapper[4948]: I0122 14:23:38.942897 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Jan 22 14:23:38 crc kubenswrapper[4948]: I0122 14:23:38.943450 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Jan 22 14:23:39 crc kubenswrapper[4948]: I0122 14:23:39.013956 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Jan 22 14:23:39 crc kubenswrapper[4948]: I0122 14:23:39.172406 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Jan 22 14:23:39 crc kubenswrapper[4948]: I0122 14:23:39.220626 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Jan 22 14:23:39 crc kubenswrapper[4948]: I0122 14:23:39.298760 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Jan 22 14:23:39 crc kubenswrapper[4948]: I0122 14:23:39.397569 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Jan 22 14:23:39 crc kubenswrapper[4948]: I0122 14:23:39.621523 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Jan 22 14:23:39 crc kubenswrapper[4948]: I0122 14:23:39.720376 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Jan 22 14:23:39 crc kubenswrapper[4948]: I0122 14:23:39.763440 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Jan 22 14:23:39 crc kubenswrapper[4948]: I0122 14:23:39.782317 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Jan 22 14:23:39 crc kubenswrapper[4948]: I0122 14:23:39.875525 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Jan 22 14:23:39 crc kubenswrapper[4948]: I0122 14:23:39.876024 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Jan 22 14:23:39 crc kubenswrapper[4948]: I0122 14:23:39.959914 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Jan 22 14:23:40 crc kubenswrapper[4948]: I0122 14:23:40.087483 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 22 14:23:40 crc kubenswrapper[4948]: I0122 14:23:40.107854 4948 
reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Jan 22 14:23:40 crc kubenswrapper[4948]: I0122 14:23:40.180266 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Jan 22 14:23:40 crc kubenswrapper[4948]: I0122 14:23:40.191986 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Jan 22 14:23:40 crc kubenswrapper[4948]: I0122 14:23:40.406997 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Jan 22 14:23:40 crc kubenswrapper[4948]: I0122 14:23:40.510978 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Jan 22 14:23:40 crc kubenswrapper[4948]: I0122 14:23:40.544085 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Jan 22 14:23:40 crc kubenswrapper[4948]: I0122 14:23:40.577581 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Jan 22 14:23:40 crc kubenswrapper[4948]: I0122 14:23:40.747277 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Jan 22 14:23:40 crc kubenswrapper[4948]: I0122 14:23:40.790658 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Jan 22 14:23:41 crc kubenswrapper[4948]: I0122 14:23:41.004204 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Jan 22 14:23:41 crc kubenswrapper[4948]: I0122 14:23:41.032477 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Jan 22 14:23:41 crc kubenswrapper[4948]: I0122 14:23:41.264290 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Jan 22 14:23:41 crc kubenswrapper[4948]: I0122 14:23:41.446339 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Jan 22 14:23:41 crc kubenswrapper[4948]: I0122 14:23:41.756738 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Jan 22 14:23:41 crc kubenswrapper[4948]: I0122 14:23:41.810472 4948 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 22 14:23:41 crc kubenswrapper[4948]: I0122 14:23:41.810584 4948 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:23:41 crc kubenswrapper[4948]: I0122 14:23:41.835908 4948 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Jan 22 14:23:41 crc kubenswrapper[4948]: I0122 14:23:41.835982 4948 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="15b0b7d15c4547fc4f1e87e8dd54c6ba9acb8ca2e424a50f940e7794d5edbe55" exitCode=137 Jan 22 14:23:41 crc kubenswrapper[4948]: I0122 14:23:41.836067 4948 scope.go:117] "RemoveContainer" containerID="15b0b7d15c4547fc4f1e87e8dd54c6ba9acb8ca2e424a50f940e7794d5edbe55" Jan 22 14:23:41 crc kubenswrapper[4948]: I0122 14:23:41.836221 4948 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Jan 22 14:23:41 crc kubenswrapper[4948]: I0122 14:23:41.857081 4948 scope.go:117] "RemoveContainer" containerID="15b0b7d15c4547fc4f1e87e8dd54c6ba9acb8ca2e424a50f940e7794d5edbe55" Jan 22 14:23:41 crc kubenswrapper[4948]: E0122 14:23:41.857636 4948 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15b0b7d15c4547fc4f1e87e8dd54c6ba9acb8ca2e424a50f940e7794d5edbe55\": container with ID starting with 15b0b7d15c4547fc4f1e87e8dd54c6ba9acb8ca2e424a50f940e7794d5edbe55 not found: ID does not exist" containerID="15b0b7d15c4547fc4f1e87e8dd54c6ba9acb8ca2e424a50f940e7794d5edbe55" Jan 22 14:23:41 crc kubenswrapper[4948]: I0122 14:23:41.857680 4948 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15b0b7d15c4547fc4f1e87e8dd54c6ba9acb8ca2e424a50f940e7794d5edbe55"} err="failed to get container status \"15b0b7d15c4547fc4f1e87e8dd54c6ba9acb8ca2e424a50f940e7794d5edbe55\": rpc error: code = NotFound desc = could not find container \"15b0b7d15c4547fc4f1e87e8dd54c6ba9acb8ca2e424a50f940e7794d5edbe55\": container with ID starting with 15b0b7d15c4547fc4f1e87e8dd54c6ba9acb8ca2e424a50f940e7794d5edbe55 not found: ID does not exist" Jan 22 14:23:41 crc kubenswrapper[4948]: I0122 14:23:41.966943 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 22 14:23:41 crc kubenswrapper[4948]: I0122 14:23:41.966988 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 22 14:23:41 crc kubenswrapper[4948]: I0122 14:23:41.967029 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 22 14:23:41 crc kubenswrapper[4948]: I0122 14:23:41.967099 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 
22 14:23:41 crc kubenswrapper[4948]: I0122 14:23:41.967122 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Jan 22 14:23:41 crc kubenswrapper[4948]: I0122 14:23:41.967183 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:23:41 crc kubenswrapper[4948]: I0122 14:23:41.967380 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:23:41 crc kubenswrapper[4948]: I0122 14:23:41.967498 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:23:41 crc kubenswrapper[4948]: I0122 14:23:41.967528 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:23:41 crc kubenswrapper[4948]: I0122 14:23:41.967938 4948 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Jan 22 14:23:41 crc kubenswrapper[4948]: I0122 14:23:41.967980 4948 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Jan 22 14:23:41 crc kubenswrapper[4948]: I0122 14:23:41.968008 4948 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Jan 22 14:23:41 crc kubenswrapper[4948]: I0122 14:23:41.968078 4948 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 22 14:23:41 crc kubenswrapper[4948]: I0122 14:23:41.979261 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Jan 22 14:23:42 crc kubenswrapper[4948]: I0122 14:23:42.069286 4948 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Jan 22 14:23:42 crc kubenswrapper[4948]: I0122 14:23:42.823869 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Jan 22 14:23:43 crc kubenswrapper[4948]: I0122 14:23:43.077651 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Jan 22 14:23:43 crc kubenswrapper[4948]: I0122 14:23:43.098633 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Jan 22 14:23:43 crc kubenswrapper[4948]: I0122 14:23:43.473433 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Jan 22 14:23:43 crc kubenswrapper[4948]: I0122 14:23:43.774480 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Jan 22 14:23:43 crc kubenswrapper[4948]: I0122 14:23:43.965168 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Jan 22 14:23:44 crc kubenswrapper[4948]: I0122 14:23:44.772232 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.315556 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl"] Jan 22 14:23:49 crc kubenswrapper[4948]: E0122 14:23:49.316310 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6" containerName="oauth-openshift" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.316337 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6" containerName="oauth-openshift" Jan 22 14:23:49 crc kubenswrapper[4948]: E0122 14:23:49.316376 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a6ff136-0872-4b4b-9104-d7c66e20d6f3" containerName="installer" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.316389 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a6ff136-0872-4b4b-9104-d7c66e20d6f3" containerName="installer" Jan 22 14:23:49 crc kubenswrapper[4948]: E0122 14:23:49.316411 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.316424 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.316594 4948 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.316615 4948 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a6ff136-0872-4b4b-9104-d7c66e20d6f3" containerName="installer" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.316635 
4948 memory_manager.go:354] "RemoveStaleState removing state" podUID="6d971c19-f94e-4e30-8cf4-ae0fb8be0ad6" containerName="oauth-openshift" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.317350 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.320484 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.321236 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.322746 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.322763 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.323397 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.324980 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.325394 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.325452 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.325751 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.326474 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.326860 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.326986 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.339149 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl"] Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.347971 4948 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.351946 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.357556 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.417858 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-system-cliconfig\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.418034 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-user-template-login\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.418069 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g5z94\" (UniqueName: \"kubernetes.io/projected/56459bf7-4027-4577-93e1-7d00970af04d-kube-api-access-g5z94\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.418095 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.418146 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.418173 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/56459bf7-4027-4577-93e1-7d00970af04d-audit-policies\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.418194 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-system-session\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.418218 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-system-serving-cert\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 
14:23:49.418257 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-system-service-ca\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.418280 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-user-template-error\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.418304 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.418344 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.418370 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/56459bf7-4027-4577-93e1-7d00970af04d-audit-dir\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.418394 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-system-router-certs\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.519328 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.519437 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: 
\"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.519487 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/56459bf7-4027-4577-93e1-7d00970af04d-audit-dir\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.519523 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-system-router-certs\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.519587 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-system-cliconfig\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.519618 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-user-template-login\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.519664 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.519707 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g5z94\" (UniqueName: \"kubernetes.io/projected/56459bf7-4027-4577-93e1-7d00970af04d-kube-api-access-g5z94\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.519785 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.519849 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/56459bf7-4027-4577-93e1-7d00970af04d-audit-policies\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " 
pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.519911 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-system-session\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.519950 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-system-serving-cert\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.520007 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-system-service-ca\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.520085 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-user-template-error\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.522378 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.522402 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-system-cliconfig\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.522462 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/56459bf7-4027-4577-93e1-7d00970af04d-audit-dir\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.523160 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-system-service-ca\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc 
kubenswrapper[4948]: I0122 14:23:49.523656 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/56459bf7-4027-4577-93e1-7d00970af04d-audit-policies\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.526896 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.526913 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-system-session\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.527357 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-user-template-error\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.527725 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-system-serving-cert\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.528182 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.536746 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.536997 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-system-router-certs\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.537293 4948 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/56459bf7-4027-4577-93e1-7d00970af04d-v4-0-config-user-template-login\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.547963 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g5z94\" (UniqueName: \"kubernetes.io/projected/56459bf7-4027-4577-93e1-7d00970af04d-kube-api-access-g5z94\") pod \"oauth-openshift-64c9cdcbb9-jhzwl\" (UID: \"56459bf7-4027-4577-93e1-7d00970af04d\") " pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:49 crc kubenswrapper[4948]: I0122 14:23:49.649177 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:50 crc kubenswrapper[4948]: I0122 14:23:50.050976 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl"] Jan 22 14:23:50 crc kubenswrapper[4948]: I0122 14:23:50.891116 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" event={"ID":"56459bf7-4027-4577-93e1-7d00970af04d","Type":"ContainerStarted","Data":"dbad35d737570b05a796ba5b5a57edb77268c31b3382b7c97168e2721f21de48"} Jan 22 14:23:50 crc kubenswrapper[4948]: I0122 14:23:50.891598 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:50 crc kubenswrapper[4948]: I0122 14:23:50.891621 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" event={"ID":"56459bf7-4027-4577-93e1-7d00970af04d","Type":"ContainerStarted","Data":"fff86445c725c34ca93d523b956bd3716dac866867f173a3f6b20cef15bd37ee"} Jan 22 14:23:50 crc kubenswrapper[4948]: I0122 14:23:50.899209 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" Jan 22 14:23:50 crc kubenswrapper[4948]: I0122 14:23:50.916219 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-64c9cdcbb9-jhzwl" podStartSLOduration=71.916195198 podStartE2EDuration="1m11.916195198s" podCreationTimestamp="2026-01-22 14:22:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:23:50.91261546 +0000 UTC m=+289.242288389" watchObservedRunningTime="2026-01-22 14:23:50.916195198 +0000 UTC m=+289.245868137" Jan 22 14:23:56 crc kubenswrapper[4948]: I0122 14:23:56.932639 4948 generic.go:334] "Generic (PLEG): container finished" podID="12fa60a3-783c-4015-b759-ab636fd6e559" containerID="d2fe002497f8f75222d955fcf4d498d4c3241d61e5300c10d72e3bcc909d2677" exitCode=0 Jan 22 14:23:56 crc kubenswrapper[4948]: I0122 14:23:56.932714 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-7cjtc" event={"ID":"12fa60a3-783c-4015-b759-ab636fd6e559","Type":"ContainerDied","Data":"d2fe002497f8f75222d955fcf4d498d4c3241d61e5300c10d72e3bcc909d2677"} Jan 22 14:23:56 crc kubenswrapper[4948]: I0122 14:23:56.933731 4948 scope.go:117] "RemoveContainer" 
containerID="d2fe002497f8f75222d955fcf4d498d4c3241d61e5300c10d72e3bcc909d2677" Jan 22 14:23:57 crc kubenswrapper[4948]: I0122 14:23:57.944572 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-7cjtc" event={"ID":"12fa60a3-783c-4015-b759-ab636fd6e559","Type":"ContainerStarted","Data":"c66a4b7df4d2251cb365288de7a1b213ad31b60e9ccefa1dd7e834ba34f50887"} Jan 22 14:23:57 crc kubenswrapper[4948]: I0122 14:23:57.947420 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-7cjtc" Jan 22 14:23:57 crc kubenswrapper[4948]: I0122 14:23:57.951735 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-7cjtc" Jan 22 14:24:01 crc kubenswrapper[4948]: I0122 14:24:01.808315 4948 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Jan 22 14:24:47 crc kubenswrapper[4948]: I0122 14:24:47.801705 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-rw7n4"] Jan 22 14:24:47 crc kubenswrapper[4948]: I0122 14:24:47.803881 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-rw7n4" Jan 22 14:24:47 crc kubenswrapper[4948]: I0122 14:24:47.830292 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-rw7n4"] Jan 22 14:24:47 crc kubenswrapper[4948]: I0122 14:24:47.933796 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/732a68f3-66c9-4a9c-934d-4610fadabfa8-ca-trust-extracted\") pod \"image-registry-66df7c8f76-rw7n4\" (UID: \"732a68f3-66c9-4a9c-934d-4610fadabfa8\") " pod="openshift-image-registry/image-registry-66df7c8f76-rw7n4" Jan 22 14:24:47 crc kubenswrapper[4948]: I0122 14:24:47.933850 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/732a68f3-66c9-4a9c-934d-4610fadabfa8-installation-pull-secrets\") pod \"image-registry-66df7c8f76-rw7n4\" (UID: \"732a68f3-66c9-4a9c-934d-4610fadabfa8\") " pod="openshift-image-registry/image-registry-66df7c8f76-rw7n4" Jan 22 14:24:47 crc kubenswrapper[4948]: I0122 14:24:47.933876 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/732a68f3-66c9-4a9c-934d-4610fadabfa8-trusted-ca\") pod \"image-registry-66df7c8f76-rw7n4\" (UID: \"732a68f3-66c9-4a9c-934d-4610fadabfa8\") " pod="openshift-image-registry/image-registry-66df7c8f76-rw7n4" Jan 22 14:24:47 crc kubenswrapper[4948]: I0122 14:24:47.933894 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/732a68f3-66c9-4a9c-934d-4610fadabfa8-registry-tls\") pod \"image-registry-66df7c8f76-rw7n4\" (UID: \"732a68f3-66c9-4a9c-934d-4610fadabfa8\") " pod="openshift-image-registry/image-registry-66df7c8f76-rw7n4" Jan 22 14:24:47 crc kubenswrapper[4948]: I0122 14:24:47.933915 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74wt2\" (UniqueName: 
\"kubernetes.io/projected/732a68f3-66c9-4a9c-934d-4610fadabfa8-kube-api-access-74wt2\") pod \"image-registry-66df7c8f76-rw7n4\" (UID: \"732a68f3-66c9-4a9c-934d-4610fadabfa8\") " pod="openshift-image-registry/image-registry-66df7c8f76-rw7n4" Jan 22 14:24:47 crc kubenswrapper[4948]: I0122 14:24:47.933937 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-rw7n4\" (UID: \"732a68f3-66c9-4a9c-934d-4610fadabfa8\") " pod="openshift-image-registry/image-registry-66df7c8f76-rw7n4" Jan 22 14:24:47 crc kubenswrapper[4948]: I0122 14:24:47.933957 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/732a68f3-66c9-4a9c-934d-4610fadabfa8-bound-sa-token\") pod \"image-registry-66df7c8f76-rw7n4\" (UID: \"732a68f3-66c9-4a9c-934d-4610fadabfa8\") " pod="openshift-image-registry/image-registry-66df7c8f76-rw7n4" Jan 22 14:24:47 crc kubenswrapper[4948]: I0122 14:24:47.933978 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/732a68f3-66c9-4a9c-934d-4610fadabfa8-registry-certificates\") pod \"image-registry-66df7c8f76-rw7n4\" (UID: \"732a68f3-66c9-4a9c-934d-4610fadabfa8\") " pod="openshift-image-registry/image-registry-66df7c8f76-rw7n4" Jan 22 14:24:47 crc kubenswrapper[4948]: I0122 14:24:47.958580 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-rw7n4\" (UID: \"732a68f3-66c9-4a9c-934d-4610fadabfa8\") " pod="openshift-image-registry/image-registry-66df7c8f76-rw7n4" Jan 22 14:24:48 crc kubenswrapper[4948]: I0122 14:24:48.035134 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/732a68f3-66c9-4a9c-934d-4610fadabfa8-installation-pull-secrets\") pod \"image-registry-66df7c8f76-rw7n4\" (UID: \"732a68f3-66c9-4a9c-934d-4610fadabfa8\") " pod="openshift-image-registry/image-registry-66df7c8f76-rw7n4" Jan 22 14:24:48 crc kubenswrapper[4948]: I0122 14:24:48.035708 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/732a68f3-66c9-4a9c-934d-4610fadabfa8-trusted-ca\") pod \"image-registry-66df7c8f76-rw7n4\" (UID: \"732a68f3-66c9-4a9c-934d-4610fadabfa8\") " pod="openshift-image-registry/image-registry-66df7c8f76-rw7n4" Jan 22 14:24:48 crc kubenswrapper[4948]: I0122 14:24:48.035748 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/732a68f3-66c9-4a9c-934d-4610fadabfa8-registry-tls\") pod \"image-registry-66df7c8f76-rw7n4\" (UID: \"732a68f3-66c9-4a9c-934d-4610fadabfa8\") " pod="openshift-image-registry/image-registry-66df7c8f76-rw7n4" Jan 22 14:24:48 crc kubenswrapper[4948]: I0122 14:24:48.035784 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74wt2\" (UniqueName: \"kubernetes.io/projected/732a68f3-66c9-4a9c-934d-4610fadabfa8-kube-api-access-74wt2\") pod 
\"image-registry-66df7c8f76-rw7n4\" (UID: \"732a68f3-66c9-4a9c-934d-4610fadabfa8\") " pod="openshift-image-registry/image-registry-66df7c8f76-rw7n4" Jan 22 14:24:48 crc kubenswrapper[4948]: I0122 14:24:48.035821 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/732a68f3-66c9-4a9c-934d-4610fadabfa8-bound-sa-token\") pod \"image-registry-66df7c8f76-rw7n4\" (UID: \"732a68f3-66c9-4a9c-934d-4610fadabfa8\") " pod="openshift-image-registry/image-registry-66df7c8f76-rw7n4" Jan 22 14:24:48 crc kubenswrapper[4948]: I0122 14:24:48.035853 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/732a68f3-66c9-4a9c-934d-4610fadabfa8-registry-certificates\") pod \"image-registry-66df7c8f76-rw7n4\" (UID: \"732a68f3-66c9-4a9c-934d-4610fadabfa8\") " pod="openshift-image-registry/image-registry-66df7c8f76-rw7n4" Jan 22 14:24:48 crc kubenswrapper[4948]: I0122 14:24:48.035916 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/732a68f3-66c9-4a9c-934d-4610fadabfa8-ca-trust-extracted\") pod \"image-registry-66df7c8f76-rw7n4\" (UID: \"732a68f3-66c9-4a9c-934d-4610fadabfa8\") " pod="openshift-image-registry/image-registry-66df7c8f76-rw7n4" Jan 22 14:24:48 crc kubenswrapper[4948]: I0122 14:24:48.036804 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/732a68f3-66c9-4a9c-934d-4610fadabfa8-ca-trust-extracted\") pod \"image-registry-66df7c8f76-rw7n4\" (UID: \"732a68f3-66c9-4a9c-934d-4610fadabfa8\") " pod="openshift-image-registry/image-registry-66df7c8f76-rw7n4" Jan 22 14:24:48 crc kubenswrapper[4948]: I0122 14:24:48.036913 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/732a68f3-66c9-4a9c-934d-4610fadabfa8-trusted-ca\") pod \"image-registry-66df7c8f76-rw7n4\" (UID: \"732a68f3-66c9-4a9c-934d-4610fadabfa8\") " pod="openshift-image-registry/image-registry-66df7c8f76-rw7n4" Jan 22 14:24:48 crc kubenswrapper[4948]: I0122 14:24:48.037398 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/732a68f3-66c9-4a9c-934d-4610fadabfa8-registry-certificates\") pod \"image-registry-66df7c8f76-rw7n4\" (UID: \"732a68f3-66c9-4a9c-934d-4610fadabfa8\") " pod="openshift-image-registry/image-registry-66df7c8f76-rw7n4" Jan 22 14:24:48 crc kubenswrapper[4948]: I0122 14:24:48.042458 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/732a68f3-66c9-4a9c-934d-4610fadabfa8-installation-pull-secrets\") pod \"image-registry-66df7c8f76-rw7n4\" (UID: \"732a68f3-66c9-4a9c-934d-4610fadabfa8\") " pod="openshift-image-registry/image-registry-66df7c8f76-rw7n4" Jan 22 14:24:48 crc kubenswrapper[4948]: I0122 14:24:48.050874 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/732a68f3-66c9-4a9c-934d-4610fadabfa8-registry-tls\") pod \"image-registry-66df7c8f76-rw7n4\" (UID: \"732a68f3-66c9-4a9c-934d-4610fadabfa8\") " pod="openshift-image-registry/image-registry-66df7c8f76-rw7n4" Jan 22 14:24:48 crc kubenswrapper[4948]: I0122 14:24:48.055706 4948 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-74wt2\" (UniqueName: \"kubernetes.io/projected/732a68f3-66c9-4a9c-934d-4610fadabfa8-kube-api-access-74wt2\") pod \"image-registry-66df7c8f76-rw7n4\" (UID: \"732a68f3-66c9-4a9c-934d-4610fadabfa8\") " pod="openshift-image-registry/image-registry-66df7c8f76-rw7n4" Jan 22 14:24:48 crc kubenswrapper[4948]: I0122 14:24:48.066140 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/732a68f3-66c9-4a9c-934d-4610fadabfa8-bound-sa-token\") pod \"image-registry-66df7c8f76-rw7n4\" (UID: \"732a68f3-66c9-4a9c-934d-4610fadabfa8\") " pod="openshift-image-registry/image-registry-66df7c8f76-rw7n4" Jan 22 14:24:48 crc kubenswrapper[4948]: I0122 14:24:48.125362 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-rw7n4" Jan 22 14:24:48 crc kubenswrapper[4948]: I0122 14:24:48.366725 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-rw7n4"] Jan 22 14:24:49 crc kubenswrapper[4948]: I0122 14:24:49.281220 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-rw7n4" event={"ID":"732a68f3-66c9-4a9c-934d-4610fadabfa8","Type":"ContainerStarted","Data":"530d686144eb7f7d71bd5edcebb1d0e76b50b04bdbdce5fbe9b2fce4c65e7012"} Jan 22 14:24:49 crc kubenswrapper[4948]: I0122 14:24:49.281702 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-rw7n4" event={"ID":"732a68f3-66c9-4a9c-934d-4610fadabfa8","Type":"ContainerStarted","Data":"21173ad62f2c12eb49bbd39c7aac76c984ee4c6ccf61df819f174a383951b656"} Jan 22 14:24:49 crc kubenswrapper[4948]: I0122 14:24:49.281746 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-rw7n4" Jan 22 14:24:49 crc kubenswrapper[4948]: I0122 14:24:49.310724 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-rw7n4" podStartSLOduration=2.310705129 podStartE2EDuration="2.310705129s" podCreationTimestamp="2026-01-22 14:24:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:24:49.306640316 +0000 UTC m=+347.636313255" watchObservedRunningTime="2026-01-22 14:24:49.310705129 +0000 UTC m=+347.640378058" Jan 22 14:25:08 crc kubenswrapper[4948]: I0122 14:25:08.131307 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-rw7n4" Jan 22 14:25:08 crc kubenswrapper[4948]: I0122 14:25:08.191989 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-5mc8q"] Jan 22 14:25:11 crc kubenswrapper[4948]: I0122 14:25:11.414880 4948 patch_prober.go:28] interesting pod/machine-config-daemon-ldzzt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:25:11 crc kubenswrapper[4948]: I0122 14:25:11.415352 4948 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" podUID="355ee4ab-8609-4b0c-ad77-bff335632d2d" containerName="machine-config-daemon" 
probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:25:17 crc kubenswrapper[4948]: I0122 14:25:17.384633 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-h27pl"] Jan 22 14:25:17 crc kubenswrapper[4948]: I0122 14:25:17.385965 4948 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-h27pl" podUID="8cc0468b-0cc6-43b6-b358-72c5cb9d6050" containerName="registry-server" containerID="cri-o://4151096fcad5e7293106c1c8e62bc1a3337f085b32167ef8a78c1cede552d05a" gracePeriod=30 Jan 22 14:25:17 crc kubenswrapper[4948]: I0122 14:25:17.400637 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mkngx"] Jan 22 14:25:17 crc kubenswrapper[4948]: I0122 14:25:17.401196 4948 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-mkngx" podUID="57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce" containerName="registry-server" containerID="cri-o://ae695c64de56a229e7412b5382a7c4714f6ec3648c91233d7544992039fbb198" gracePeriod=30 Jan 22 14:25:17 crc kubenswrapper[4948]: I0122 14:25:17.410895 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7cjtc"] Jan 22 14:25:17 crc kubenswrapper[4948]: I0122 14:25:17.411826 4948 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-7cjtc" podUID="12fa60a3-783c-4015-b759-ab636fd6e559" containerName="marketplace-operator" containerID="cri-o://c66a4b7df4d2251cb365288de7a1b213ad31b60e9ccefa1dd7e834ba34f50887" gracePeriod=30 Jan 22 14:25:17 crc kubenswrapper[4948]: I0122 14:25:17.416633 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wstpq"] Jan 22 14:25:17 crc kubenswrapper[4948]: I0122 14:25:17.418445 4948 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wstpq" podUID="5a4d2a63-38c9-4cd6-b02a-06c27f128cd0" containerName="registry-server" containerID="cri-o://01197ea87d7a6fbb6aac2a434bc42b1855f248637c0baf51a58f0872b0de8c99" gracePeriod=30 Jan 22 14:25:17 crc kubenswrapper[4948]: I0122 14:25:17.422106 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-fw7tk"] Jan 22 14:25:17 crc kubenswrapper[4948]: I0122 14:25:17.422820 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-fw7tk" Jan 22 14:25:17 crc kubenswrapper[4948]: I0122 14:25:17.432386 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6wr9b"] Jan 22 14:25:17 crc kubenswrapper[4948]: I0122 14:25:17.432655 4948 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-6wr9b" podUID="b89355cf-e0e4-4a32-8519-c75e0a13c7ce" containerName="registry-server" containerID="cri-o://f0ef47cd26be5524b10f130d9e9cfedfb112377b7b8a573dbfba72f2aea7171a" gracePeriod=30 Jan 22 14:25:17 crc kubenswrapper[4948]: I0122 14:25:17.476625 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-fw7tk"] Jan 22 14:25:17 crc kubenswrapper[4948]: I0122 14:25:17.591496 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/04e2a357-8bae-449c-899a-d98fea5fc65f-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-fw7tk\" (UID: \"04e2a357-8bae-449c-899a-d98fea5fc65f\") " pod="openshift-marketplace/marketplace-operator-79b997595-fw7tk" Jan 22 14:25:17 crc kubenswrapper[4948]: I0122 14:25:17.591606 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7z7s5\" (UniqueName: \"kubernetes.io/projected/04e2a357-8bae-449c-899a-d98fea5fc65f-kube-api-access-7z7s5\") pod \"marketplace-operator-79b997595-fw7tk\" (UID: \"04e2a357-8bae-449c-899a-d98fea5fc65f\") " pod="openshift-marketplace/marketplace-operator-79b997595-fw7tk" Jan 22 14:25:17 crc kubenswrapper[4948]: I0122 14:25:17.591714 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/04e2a357-8bae-449c-899a-d98fea5fc65f-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-fw7tk\" (UID: \"04e2a357-8bae-449c-899a-d98fea5fc65f\") " pod="openshift-marketplace/marketplace-operator-79b997595-fw7tk" Jan 22 14:25:17 crc kubenswrapper[4948]: I0122 14:25:17.693043 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7z7s5\" (UniqueName: \"kubernetes.io/projected/04e2a357-8bae-449c-899a-d98fea5fc65f-kube-api-access-7z7s5\") pod \"marketplace-operator-79b997595-fw7tk\" (UID: \"04e2a357-8bae-449c-899a-d98fea5fc65f\") " pod="openshift-marketplace/marketplace-operator-79b997595-fw7tk" Jan 22 14:25:17 crc kubenswrapper[4948]: I0122 14:25:17.693576 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/04e2a357-8bae-449c-899a-d98fea5fc65f-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-fw7tk\" (UID: \"04e2a357-8bae-449c-899a-d98fea5fc65f\") " pod="openshift-marketplace/marketplace-operator-79b997595-fw7tk" Jan 22 14:25:17 crc kubenswrapper[4948]: I0122 14:25:17.693613 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/04e2a357-8bae-449c-899a-d98fea5fc65f-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-fw7tk\" (UID: \"04e2a357-8bae-449c-899a-d98fea5fc65f\") " pod="openshift-marketplace/marketplace-operator-79b997595-fw7tk" Jan 22 14:25:17 crc kubenswrapper[4948]: I0122 14:25:17.695512 4948 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/04e2a357-8bae-449c-899a-d98fea5fc65f-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-fw7tk\" (UID: \"04e2a357-8bae-449c-899a-d98fea5fc65f\") " pod="openshift-marketplace/marketplace-operator-79b997595-fw7tk" Jan 22 14:25:17 crc kubenswrapper[4948]: I0122 14:25:17.702521 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/04e2a357-8bae-449c-899a-d98fea5fc65f-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-fw7tk\" (UID: \"04e2a357-8bae-449c-899a-d98fea5fc65f\") " pod="openshift-marketplace/marketplace-operator-79b997595-fw7tk" Jan 22 14:25:17 crc kubenswrapper[4948]: I0122 14:25:17.715678 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7z7s5\" (UniqueName: \"kubernetes.io/projected/04e2a357-8bae-449c-899a-d98fea5fc65f-kube-api-access-7z7s5\") pod \"marketplace-operator-79b997595-fw7tk\" (UID: \"04e2a357-8bae-449c-899a-d98fea5fc65f\") " pod="openshift-marketplace/marketplace-operator-79b997595-fw7tk" Jan 22 14:25:17 crc kubenswrapper[4948]: I0122 14:25:17.742289 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-fw7tk" Jan 22 14:25:17 crc kubenswrapper[4948]: I0122 14:25:17.846214 4948 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h27pl" Jan 22 14:25:17 crc kubenswrapper[4948]: I0122 14:25:17.853966 4948 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mkngx" Jan 22 14:25:17 crc kubenswrapper[4948]: I0122 14:25:17.877790 4948 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wstpq" Jan 22 14:25:17 crc kubenswrapper[4948]: I0122 14:25:17.887685 4948 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-6wr9b" Jan 22 14:25:17 crc kubenswrapper[4948]: I0122 14:25:17.920486 4948 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-7cjtc" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.008068 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-98fvp\" (UniqueName: \"kubernetes.io/projected/b89355cf-e0e4-4a32-8519-c75e0a13c7ce-kube-api-access-98fvp\") pod \"b89355cf-e0e4-4a32-8519-c75e0a13c7ce\" (UID: \"b89355cf-e0e4-4a32-8519-c75e0a13c7ce\") " Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.008146 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fvnj2\" (UniqueName: \"kubernetes.io/projected/57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce-kube-api-access-fvnj2\") pod \"57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce\" (UID: \"57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce\") " Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.008184 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5a4d2a63-38c9-4cd6-b02a-06c27f128cd0-utilities\") pod \"5a4d2a63-38c9-4cd6-b02a-06c27f128cd0\" (UID: \"5a4d2a63-38c9-4cd6-b02a-06c27f128cd0\") " Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.008303 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce-catalog-content\") pod \"57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce\" (UID: \"57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce\") " Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.008345 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b89355cf-e0e4-4a32-8519-c75e0a13c7ce-catalog-content\") pod \"b89355cf-e0e4-4a32-8519-c75e0a13c7ce\" (UID: \"b89355cf-e0e4-4a32-8519-c75e0a13c7ce\") " Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.008376 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b89355cf-e0e4-4a32-8519-c75e0a13c7ce-utilities\") pod \"b89355cf-e0e4-4a32-8519-c75e0a13c7ce\" (UID: \"b89355cf-e0e4-4a32-8519-c75e0a13c7ce\") " Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.008426 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a4d2a63-38c9-4cd6-b02a-06c27f128cd0-catalog-content\") pod \"5a4d2a63-38c9-4cd6-b02a-06c27f128cd0\" (UID: \"5a4d2a63-38c9-4cd6-b02a-06c27f128cd0\") " Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.008462 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-db5bw\" (UniqueName: \"kubernetes.io/projected/5a4d2a63-38c9-4cd6-b02a-06c27f128cd0-kube-api-access-db5bw\") pod \"5a4d2a63-38c9-4cd6-b02a-06c27f128cd0\" (UID: \"5a4d2a63-38c9-4cd6-b02a-06c27f128cd0\") " Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.008495 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cc0468b-0cc6-43b6-b358-72c5cb9d6050-catalog-content\") pod \"8cc0468b-0cc6-43b6-b358-72c5cb9d6050\" (UID: \"8cc0468b-0cc6-43b6-b358-72c5cb9d6050\") " Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.008515 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cc0468b-0cc6-43b6-b358-72c5cb9d6050-utilities\") 
pod \"8cc0468b-0cc6-43b6-b358-72c5cb9d6050\" (UID: \"8cc0468b-0cc6-43b6-b358-72c5cb9d6050\") " Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.008530 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9mpgl\" (UniqueName: \"kubernetes.io/projected/8cc0468b-0cc6-43b6-b358-72c5cb9d6050-kube-api-access-9mpgl\") pod \"8cc0468b-0cc6-43b6-b358-72c5cb9d6050\" (UID: \"8cc0468b-0cc6-43b6-b358-72c5cb9d6050\") " Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.008554 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce-utilities\") pod \"57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce\" (UID: \"57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce\") " Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.009239 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a4d2a63-38c9-4cd6-b02a-06c27f128cd0-utilities" (OuterVolumeSpecName: "utilities") pod "5a4d2a63-38c9-4cd6-b02a-06c27f128cd0" (UID: "5a4d2a63-38c9-4cd6-b02a-06c27f128cd0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.010311 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce-utilities" (OuterVolumeSpecName: "utilities") pod "57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce" (UID: "57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.012924 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cc0468b-0cc6-43b6-b358-72c5cb9d6050-utilities" (OuterVolumeSpecName: "utilities") pod "8cc0468b-0cc6-43b6-b358-72c5cb9d6050" (UID: "8cc0468b-0cc6-43b6-b358-72c5cb9d6050"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.014814 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b89355cf-e0e4-4a32-8519-c75e0a13c7ce-utilities" (OuterVolumeSpecName: "utilities") pod "b89355cf-e0e4-4a32-8519-c75e0a13c7ce" (UID: "b89355cf-e0e4-4a32-8519-c75e0a13c7ce"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.017990 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b89355cf-e0e4-4a32-8519-c75e0a13c7ce-kube-api-access-98fvp" (OuterVolumeSpecName: "kube-api-access-98fvp") pod "b89355cf-e0e4-4a32-8519-c75e0a13c7ce" (UID: "b89355cf-e0e4-4a32-8519-c75e0a13c7ce"). InnerVolumeSpecName "kube-api-access-98fvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.020865 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5a4d2a63-38c9-4cd6-b02a-06c27f128cd0-kube-api-access-db5bw" (OuterVolumeSpecName: "kube-api-access-db5bw") pod "5a4d2a63-38c9-4cd6-b02a-06c27f128cd0" (UID: "5a4d2a63-38c9-4cd6-b02a-06c27f128cd0"). InnerVolumeSpecName "kube-api-access-db5bw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.021895 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce-kube-api-access-fvnj2" (OuterVolumeSpecName: "kube-api-access-fvnj2") pod "57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce" (UID: "57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce"). InnerVolumeSpecName "kube-api-access-fvnj2". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.034713 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5a4d2a63-38c9-4cd6-b02a-06c27f128cd0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5a4d2a63-38c9-4cd6-b02a-06c27f128cd0" (UID: "5a4d2a63-38c9-4cd6-b02a-06c27f128cd0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.053207 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cc0468b-0cc6-43b6-b358-72c5cb9d6050-kube-api-access-9mpgl" (OuterVolumeSpecName: "kube-api-access-9mpgl") pod "8cc0468b-0cc6-43b6-b358-72c5cb9d6050" (UID: "8cc0468b-0cc6-43b6-b358-72c5cb9d6050"). InnerVolumeSpecName "kube-api-access-9mpgl". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.089431 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8cc0468b-0cc6-43b6-b358-72c5cb9d6050-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8cc0468b-0cc6-43b6-b358-72c5cb9d6050" (UID: "8cc0468b-0cc6-43b6-b358-72c5cb9d6050"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.110166 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/12fa60a3-783c-4015-b759-ab636fd6e559-marketplace-operator-metrics\") pod \"12fa60a3-783c-4015-b759-ab636fd6e559\" (UID: \"12fa60a3-783c-4015-b759-ab636fd6e559\") " Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.110234 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/12fa60a3-783c-4015-b759-ab636fd6e559-marketplace-trusted-ca\") pod \"12fa60a3-783c-4015-b759-ab636fd6e559\" (UID: \"12fa60a3-783c-4015-b759-ab636fd6e559\") " Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.110352 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lkz2x\" (UniqueName: \"kubernetes.io/projected/12fa60a3-783c-4015-b759-ab636fd6e559-kube-api-access-lkz2x\") pod \"12fa60a3-783c-4015-b759-ab636fd6e559\" (UID: \"12fa60a3-783c-4015-b759-ab636fd6e559\") " Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.110885 4948 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5a4d2a63-38c9-4cd6-b02a-06c27f128cd0-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.110909 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-db5bw\" (UniqueName: \"kubernetes.io/projected/5a4d2a63-38c9-4cd6-b02a-06c27f128cd0-kube-api-access-db5bw\") on node \"crc\" DevicePath \"\"" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.110926 4948 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8cc0468b-0cc6-43b6-b358-72c5cb9d6050-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.110939 4948 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8cc0468b-0cc6-43b6-b358-72c5cb9d6050-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.110953 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9mpgl\" (UniqueName: \"kubernetes.io/projected/8cc0468b-0cc6-43b6-b358-72c5cb9d6050-kube-api-access-9mpgl\") on node \"crc\" DevicePath \"\"" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.110966 4948 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.110978 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-98fvp\" (UniqueName: \"kubernetes.io/projected/b89355cf-e0e4-4a32-8519-c75e0a13c7ce-kube-api-access-98fvp\") on node \"crc\" DevicePath \"\"" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.110992 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fvnj2\" (UniqueName: \"kubernetes.io/projected/57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce-kube-api-access-fvnj2\") on node \"crc\" DevicePath \"\"" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.111065 4948 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/5a4d2a63-38c9-4cd6-b02a-06c27f128cd0-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.111080 4948 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b89355cf-e0e4-4a32-8519-c75e0a13c7ce-utilities\") on node \"crc\" DevicePath \"\"" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.111270 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/12fa60a3-783c-4015-b759-ab636fd6e559-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "12fa60a3-783c-4015-b759-ab636fd6e559" (UID: "12fa60a3-783c-4015-b759-ab636fd6e559"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.114007 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/12fa60a3-783c-4015-b759-ab636fd6e559-kube-api-access-lkz2x" (OuterVolumeSpecName: "kube-api-access-lkz2x") pod "12fa60a3-783c-4015-b759-ab636fd6e559" (UID: "12fa60a3-783c-4015-b759-ab636fd6e559"). InnerVolumeSpecName "kube-api-access-lkz2x". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.114631 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/12fa60a3-783c-4015-b759-ab636fd6e559-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "12fa60a3-783c-4015-b759-ab636fd6e559" (UID: "12fa60a3-783c-4015-b759-ab636fd6e559"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.119804 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce" (UID: "57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.176748 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b89355cf-e0e4-4a32-8519-c75e0a13c7ce-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b89355cf-e0e4-4a32-8519-c75e0a13c7ce" (UID: "b89355cf-e0e4-4a32-8519-c75e0a13c7ce"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.211786 4948 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b89355cf-e0e4-4a32-8519-c75e0a13c7ce-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.212008 4948 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/12fa60a3-783c-4015-b759-ab636fd6e559-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.212031 4948 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/12fa60a3-783c-4015-b759-ab636fd6e559-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.212041 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lkz2x\" (UniqueName: \"kubernetes.io/projected/12fa60a3-783c-4015-b759-ab636fd6e559-kube-api-access-lkz2x\") on node \"crc\" DevicePath \"\"" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.212050 4948 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce-catalog-content\") on node \"crc\" DevicePath \"\"" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.274351 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-fw7tk"] Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.455916 4948 generic.go:334] "Generic (PLEG): container finished" podID="8cc0468b-0cc6-43b6-b358-72c5cb9d6050" containerID="4151096fcad5e7293106c1c8e62bc1a3337f085b32167ef8a78c1cede552d05a" exitCode=0 Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.455989 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h27pl" event={"ID":"8cc0468b-0cc6-43b6-b358-72c5cb9d6050","Type":"ContainerDied","Data":"4151096fcad5e7293106c1c8e62bc1a3337f085b32167ef8a78c1cede552d05a"} Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.456088 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h27pl" event={"ID":"8cc0468b-0cc6-43b6-b358-72c5cb9d6050","Type":"ContainerDied","Data":"0cc0c199741569b31c3b36b87e6d4d180fcd7b52bfbe465b2e00265bf31c346a"} Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.456112 4948 scope.go:117] "RemoveContainer" containerID="4151096fcad5e7293106c1c8e62bc1a3337f085b32167ef8a78c1cede552d05a" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.456292 4948 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-h27pl" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.459491 4948 generic.go:334] "Generic (PLEG): container finished" podID="57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce" containerID="ae695c64de56a229e7412b5382a7c4714f6ec3648c91233d7544992039fbb198" exitCode=0 Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.459581 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mkngx" event={"ID":"57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce","Type":"ContainerDied","Data":"ae695c64de56a229e7412b5382a7c4714f6ec3648c91233d7544992039fbb198"} Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.459618 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-mkngx" event={"ID":"57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce","Type":"ContainerDied","Data":"e464f339302b3f106264808b5b804b210b9fcb508d5f99800151d42be8a1693e"} Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.459638 4948 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-mkngx" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.462042 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-fw7tk" event={"ID":"04e2a357-8bae-449c-899a-d98fea5fc65f","Type":"ContainerStarted","Data":"581b160400bb51553ca532d9096242c5fb0b32bc46d7ebf7660f0934ab6c50ab"} Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.462076 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-fw7tk" event={"ID":"04e2a357-8bae-449c-899a-d98fea5fc65f","Type":"ContainerStarted","Data":"2e8ae0a497940ce3bac6499d709814f729ebf0ef4142eacaea251fbb7ba0e79f"} Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.462549 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-fw7tk" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.463759 4948 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-fw7tk container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.65:8080/healthz\": dial tcp 10.217.0.65:8080: connect: connection refused" start-of-body= Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.463818 4948 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-fw7tk" podUID="04e2a357-8bae-449c-899a-d98fea5fc65f" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.65:8080/healthz\": dial tcp 10.217.0.65:8080: connect: connection refused" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.464289 4948 generic.go:334] "Generic (PLEG): container finished" podID="12fa60a3-783c-4015-b759-ab636fd6e559" containerID="c66a4b7df4d2251cb365288de7a1b213ad31b60e9ccefa1dd7e834ba34f50887" exitCode=0 Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.464348 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-7cjtc" event={"ID":"12fa60a3-783c-4015-b759-ab636fd6e559","Type":"ContainerDied","Data":"c66a4b7df4d2251cb365288de7a1b213ad31b60e9ccefa1dd7e834ba34f50887"} Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.464366 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/marketplace-operator-79b997595-7cjtc" event={"ID":"12fa60a3-783c-4015-b759-ab636fd6e559","Type":"ContainerDied","Data":"9f2cdfb8d5e32ab42afed652f3b9322b6570a8219dd0a57ed458d8e91e8bb306"} Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.464359 4948 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-7cjtc" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.466187 4948 generic.go:334] "Generic (PLEG): container finished" podID="5a4d2a63-38c9-4cd6-b02a-06c27f128cd0" containerID="01197ea87d7a6fbb6aac2a434bc42b1855f248637c0baf51a58f0872b0de8c99" exitCode=0 Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.466264 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wstpq" event={"ID":"5a4d2a63-38c9-4cd6-b02a-06c27f128cd0","Type":"ContainerDied","Data":"01197ea87d7a6fbb6aac2a434bc42b1855f248637c0baf51a58f0872b0de8c99"} Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.466306 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wstpq" event={"ID":"5a4d2a63-38c9-4cd6-b02a-06c27f128cd0","Type":"ContainerDied","Data":"2deb4b4b8c03459390c225d8d2dac13c749130a09079c33ddf7742995586dc8f"} Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.466319 4948 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wstpq" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.474140 4948 generic.go:334] "Generic (PLEG): container finished" podID="b89355cf-e0e4-4a32-8519-c75e0a13c7ce" containerID="f0ef47cd26be5524b10f130d9e9cfedfb112377b7b8a573dbfba72f2aea7171a" exitCode=0 Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.474178 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6wr9b" event={"ID":"b89355cf-e0e4-4a32-8519-c75e0a13c7ce","Type":"ContainerDied","Data":"f0ef47cd26be5524b10f130d9e9cfedfb112377b7b8a573dbfba72f2aea7171a"} Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.474208 4948 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-6wr9b" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.474216 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-6wr9b" event={"ID":"b89355cf-e0e4-4a32-8519-c75e0a13c7ce","Type":"ContainerDied","Data":"27085b361928952d183866a950fd73e38fcdc7a2d807eba701cc5439f2b30154"} Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.481214 4948 scope.go:117] "RemoveContainer" containerID="0c3c693bce12adb21187481331a776acbbf613eeca91bb097c2690cfbbb5033d" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.507598 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-fw7tk" podStartSLOduration=1.507576472 podStartE2EDuration="1.507576472s" podCreationTimestamp="2026-01-22 14:25:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-01-22 14:25:18.494565579 +0000 UTC m=+376.824238508" watchObservedRunningTime="2026-01-22 14:25:18.507576472 +0000 UTC m=+376.837249401" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.521118 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-h27pl"] Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.524173 4948 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-h27pl"] Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.526095 4948 scope.go:117] "RemoveContainer" containerID="22a089c023698726e03983bffa00b1b0d43193123dc9b9f6c9cadc49feebe2d6" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.541488 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-mkngx"] Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.545537 4948 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-mkngx"] Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.556744 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7cjtc"] Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.560623 4948 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7cjtc"] Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.571037 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wstpq"] Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.584433 4948 scope.go:117] "RemoveContainer" containerID="4151096fcad5e7293106c1c8e62bc1a3337f085b32167ef8a78c1cede552d05a" Jan 22 14:25:18 crc kubenswrapper[4948]: E0122 14:25:18.585077 4948 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4151096fcad5e7293106c1c8e62bc1a3337f085b32167ef8a78c1cede552d05a\": container with ID starting with 4151096fcad5e7293106c1c8e62bc1a3337f085b32167ef8a78c1cede552d05a not found: ID does not exist" containerID="4151096fcad5e7293106c1c8e62bc1a3337f085b32167ef8a78c1cede552d05a" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.585141 4948 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4151096fcad5e7293106c1c8e62bc1a3337f085b32167ef8a78c1cede552d05a"} err="failed to get container status \"4151096fcad5e7293106c1c8e62bc1a3337f085b32167ef8a78c1cede552d05a\": 
rpc error: code = NotFound desc = could not find container \"4151096fcad5e7293106c1c8e62bc1a3337f085b32167ef8a78c1cede552d05a\": container with ID starting with 4151096fcad5e7293106c1c8e62bc1a3337f085b32167ef8a78c1cede552d05a not found: ID does not exist" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.585168 4948 scope.go:117] "RemoveContainer" containerID="0c3c693bce12adb21187481331a776acbbf613eeca91bb097c2690cfbbb5033d" Jan 22 14:25:18 crc kubenswrapper[4948]: E0122 14:25:18.585903 4948 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0c3c693bce12adb21187481331a776acbbf613eeca91bb097c2690cfbbb5033d\": container with ID starting with 0c3c693bce12adb21187481331a776acbbf613eeca91bb097c2690cfbbb5033d not found: ID does not exist" containerID="0c3c693bce12adb21187481331a776acbbf613eeca91bb097c2690cfbbb5033d" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.585953 4948 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0c3c693bce12adb21187481331a776acbbf613eeca91bb097c2690cfbbb5033d"} err="failed to get container status \"0c3c693bce12adb21187481331a776acbbf613eeca91bb097c2690cfbbb5033d\": rpc error: code = NotFound desc = could not find container \"0c3c693bce12adb21187481331a776acbbf613eeca91bb097c2690cfbbb5033d\": container with ID starting with 0c3c693bce12adb21187481331a776acbbf613eeca91bb097c2690cfbbb5033d not found: ID does not exist" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.585987 4948 scope.go:117] "RemoveContainer" containerID="22a089c023698726e03983bffa00b1b0d43193123dc9b9f6c9cadc49feebe2d6" Jan 22 14:25:18 crc kubenswrapper[4948]: E0122 14:25:18.586747 4948 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22a089c023698726e03983bffa00b1b0d43193123dc9b9f6c9cadc49feebe2d6\": container with ID starting with 22a089c023698726e03983bffa00b1b0d43193123dc9b9f6c9cadc49feebe2d6 not found: ID does not exist" containerID="22a089c023698726e03983bffa00b1b0d43193123dc9b9f6c9cadc49feebe2d6" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.586765 4948 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22a089c023698726e03983bffa00b1b0d43193123dc9b9f6c9cadc49feebe2d6"} err="failed to get container status \"22a089c023698726e03983bffa00b1b0d43193123dc9b9f6c9cadc49feebe2d6\": rpc error: code = NotFound desc = could not find container \"22a089c023698726e03983bffa00b1b0d43193123dc9b9f6c9cadc49feebe2d6\": container with ID starting with 22a089c023698726e03983bffa00b1b0d43193123dc9b9f6c9cadc49feebe2d6 not found: ID does not exist" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.586782 4948 scope.go:117] "RemoveContainer" containerID="ae695c64de56a229e7412b5382a7c4714f6ec3648c91233d7544992039fbb198" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.586822 4948 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-wstpq"] Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.591480 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-6wr9b"] Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.595403 4948 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-6wr9b"] Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.602935 4948 scope.go:117] "RemoveContainer" 
containerID="bb3a42ddb85b5bebc89dbeaa494dae19b627dbc22f3dd0c8712a7c585cd0fb6d" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.620486 4948 scope.go:117] "RemoveContainer" containerID="f9f14943039348e6975a936f5349314e432000513cb7c3c3f3605e0742abef20" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.634329 4948 scope.go:117] "RemoveContainer" containerID="ae695c64de56a229e7412b5382a7c4714f6ec3648c91233d7544992039fbb198" Jan 22 14:25:18 crc kubenswrapper[4948]: E0122 14:25:18.636480 4948 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae695c64de56a229e7412b5382a7c4714f6ec3648c91233d7544992039fbb198\": container with ID starting with ae695c64de56a229e7412b5382a7c4714f6ec3648c91233d7544992039fbb198 not found: ID does not exist" containerID="ae695c64de56a229e7412b5382a7c4714f6ec3648c91233d7544992039fbb198" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.636515 4948 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae695c64de56a229e7412b5382a7c4714f6ec3648c91233d7544992039fbb198"} err="failed to get container status \"ae695c64de56a229e7412b5382a7c4714f6ec3648c91233d7544992039fbb198\": rpc error: code = NotFound desc = could not find container \"ae695c64de56a229e7412b5382a7c4714f6ec3648c91233d7544992039fbb198\": container with ID starting with ae695c64de56a229e7412b5382a7c4714f6ec3648c91233d7544992039fbb198 not found: ID does not exist" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.636542 4948 scope.go:117] "RemoveContainer" containerID="bb3a42ddb85b5bebc89dbeaa494dae19b627dbc22f3dd0c8712a7c585cd0fb6d" Jan 22 14:25:18 crc kubenswrapper[4948]: E0122 14:25:18.636821 4948 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bb3a42ddb85b5bebc89dbeaa494dae19b627dbc22f3dd0c8712a7c585cd0fb6d\": container with ID starting with bb3a42ddb85b5bebc89dbeaa494dae19b627dbc22f3dd0c8712a7c585cd0fb6d not found: ID does not exist" containerID="bb3a42ddb85b5bebc89dbeaa494dae19b627dbc22f3dd0c8712a7c585cd0fb6d" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.637098 4948 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bb3a42ddb85b5bebc89dbeaa494dae19b627dbc22f3dd0c8712a7c585cd0fb6d"} err="failed to get container status \"bb3a42ddb85b5bebc89dbeaa494dae19b627dbc22f3dd0c8712a7c585cd0fb6d\": rpc error: code = NotFound desc = could not find container \"bb3a42ddb85b5bebc89dbeaa494dae19b627dbc22f3dd0c8712a7c585cd0fb6d\": container with ID starting with bb3a42ddb85b5bebc89dbeaa494dae19b627dbc22f3dd0c8712a7c585cd0fb6d not found: ID does not exist" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.637132 4948 scope.go:117] "RemoveContainer" containerID="f9f14943039348e6975a936f5349314e432000513cb7c3c3f3605e0742abef20" Jan 22 14:25:18 crc kubenswrapper[4948]: E0122 14:25:18.637489 4948 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f9f14943039348e6975a936f5349314e432000513cb7c3c3f3605e0742abef20\": container with ID starting with f9f14943039348e6975a936f5349314e432000513cb7c3c3f3605e0742abef20 not found: ID does not exist" containerID="f9f14943039348e6975a936f5349314e432000513cb7c3c3f3605e0742abef20" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.637541 4948 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"f9f14943039348e6975a936f5349314e432000513cb7c3c3f3605e0742abef20"} err="failed to get container status \"f9f14943039348e6975a936f5349314e432000513cb7c3c3f3605e0742abef20\": rpc error: code = NotFound desc = could not find container \"f9f14943039348e6975a936f5349314e432000513cb7c3c3f3605e0742abef20\": container with ID starting with f9f14943039348e6975a936f5349314e432000513cb7c3c3f3605e0742abef20 not found: ID does not exist" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.637576 4948 scope.go:117] "RemoveContainer" containerID="c66a4b7df4d2251cb365288de7a1b213ad31b60e9ccefa1dd7e834ba34f50887" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.652566 4948 scope.go:117] "RemoveContainer" containerID="d2fe002497f8f75222d955fcf4d498d4c3241d61e5300c10d72e3bcc909d2677" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.667216 4948 scope.go:117] "RemoveContainer" containerID="c66a4b7df4d2251cb365288de7a1b213ad31b60e9ccefa1dd7e834ba34f50887" Jan 22 14:25:18 crc kubenswrapper[4948]: E0122 14:25:18.668275 4948 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c66a4b7df4d2251cb365288de7a1b213ad31b60e9ccefa1dd7e834ba34f50887\": container with ID starting with c66a4b7df4d2251cb365288de7a1b213ad31b60e9ccefa1dd7e834ba34f50887 not found: ID does not exist" containerID="c66a4b7df4d2251cb365288de7a1b213ad31b60e9ccefa1dd7e834ba34f50887" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.668326 4948 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c66a4b7df4d2251cb365288de7a1b213ad31b60e9ccefa1dd7e834ba34f50887"} err="failed to get container status \"c66a4b7df4d2251cb365288de7a1b213ad31b60e9ccefa1dd7e834ba34f50887\": rpc error: code = NotFound desc = could not find container \"c66a4b7df4d2251cb365288de7a1b213ad31b60e9ccefa1dd7e834ba34f50887\": container with ID starting with c66a4b7df4d2251cb365288de7a1b213ad31b60e9ccefa1dd7e834ba34f50887 not found: ID does not exist" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.668366 4948 scope.go:117] "RemoveContainer" containerID="d2fe002497f8f75222d955fcf4d498d4c3241d61e5300c10d72e3bcc909d2677" Jan 22 14:25:18 crc kubenswrapper[4948]: E0122 14:25:18.668762 4948 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d2fe002497f8f75222d955fcf4d498d4c3241d61e5300c10d72e3bcc909d2677\": container with ID starting with d2fe002497f8f75222d955fcf4d498d4c3241d61e5300c10d72e3bcc909d2677 not found: ID does not exist" containerID="d2fe002497f8f75222d955fcf4d498d4c3241d61e5300c10d72e3bcc909d2677" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.668789 4948 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d2fe002497f8f75222d955fcf4d498d4c3241d61e5300c10d72e3bcc909d2677"} err="failed to get container status \"d2fe002497f8f75222d955fcf4d498d4c3241d61e5300c10d72e3bcc909d2677\": rpc error: code = NotFound desc = could not find container \"d2fe002497f8f75222d955fcf4d498d4c3241d61e5300c10d72e3bcc909d2677\": container with ID starting with d2fe002497f8f75222d955fcf4d498d4c3241d61e5300c10d72e3bcc909d2677 not found: ID does not exist" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.668806 4948 scope.go:117] "RemoveContainer" containerID="01197ea87d7a6fbb6aac2a434bc42b1855f248637c0baf51a58f0872b0de8c99" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.681722 4948 
scope.go:117] "RemoveContainer" containerID="b79c4c135fe56c89b4ce3705fe5346136d37b6c8b35064e284348e7b50571219" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.696078 4948 scope.go:117] "RemoveContainer" containerID="fe957e6254bb35a97b2646b9f40ed8ce3275eac990c2cf5cbe36d40cf881576b" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.711854 4948 scope.go:117] "RemoveContainer" containerID="01197ea87d7a6fbb6aac2a434bc42b1855f248637c0baf51a58f0872b0de8c99" Jan 22 14:25:18 crc kubenswrapper[4948]: E0122 14:25:18.712286 4948 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01197ea87d7a6fbb6aac2a434bc42b1855f248637c0baf51a58f0872b0de8c99\": container with ID starting with 01197ea87d7a6fbb6aac2a434bc42b1855f248637c0baf51a58f0872b0de8c99 not found: ID does not exist" containerID="01197ea87d7a6fbb6aac2a434bc42b1855f248637c0baf51a58f0872b0de8c99" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.712323 4948 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01197ea87d7a6fbb6aac2a434bc42b1855f248637c0baf51a58f0872b0de8c99"} err="failed to get container status \"01197ea87d7a6fbb6aac2a434bc42b1855f248637c0baf51a58f0872b0de8c99\": rpc error: code = NotFound desc = could not find container \"01197ea87d7a6fbb6aac2a434bc42b1855f248637c0baf51a58f0872b0de8c99\": container with ID starting with 01197ea87d7a6fbb6aac2a434bc42b1855f248637c0baf51a58f0872b0de8c99 not found: ID does not exist" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.712347 4948 scope.go:117] "RemoveContainer" containerID="b79c4c135fe56c89b4ce3705fe5346136d37b6c8b35064e284348e7b50571219" Jan 22 14:25:18 crc kubenswrapper[4948]: E0122 14:25:18.712859 4948 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b79c4c135fe56c89b4ce3705fe5346136d37b6c8b35064e284348e7b50571219\": container with ID starting with b79c4c135fe56c89b4ce3705fe5346136d37b6c8b35064e284348e7b50571219 not found: ID does not exist" containerID="b79c4c135fe56c89b4ce3705fe5346136d37b6c8b35064e284348e7b50571219" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.712902 4948 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b79c4c135fe56c89b4ce3705fe5346136d37b6c8b35064e284348e7b50571219"} err="failed to get container status \"b79c4c135fe56c89b4ce3705fe5346136d37b6c8b35064e284348e7b50571219\": rpc error: code = NotFound desc = could not find container \"b79c4c135fe56c89b4ce3705fe5346136d37b6c8b35064e284348e7b50571219\": container with ID starting with b79c4c135fe56c89b4ce3705fe5346136d37b6c8b35064e284348e7b50571219 not found: ID does not exist" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.712934 4948 scope.go:117] "RemoveContainer" containerID="fe957e6254bb35a97b2646b9f40ed8ce3275eac990c2cf5cbe36d40cf881576b" Jan 22 14:25:18 crc kubenswrapper[4948]: E0122 14:25:18.713335 4948 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fe957e6254bb35a97b2646b9f40ed8ce3275eac990c2cf5cbe36d40cf881576b\": container with ID starting with fe957e6254bb35a97b2646b9f40ed8ce3275eac990c2cf5cbe36d40cf881576b not found: ID does not exist" containerID="fe957e6254bb35a97b2646b9f40ed8ce3275eac990c2cf5cbe36d40cf881576b" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.713363 4948 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"fe957e6254bb35a97b2646b9f40ed8ce3275eac990c2cf5cbe36d40cf881576b"} err="failed to get container status \"fe957e6254bb35a97b2646b9f40ed8ce3275eac990c2cf5cbe36d40cf881576b\": rpc error: code = NotFound desc = could not find container \"fe957e6254bb35a97b2646b9f40ed8ce3275eac990c2cf5cbe36d40cf881576b\": container with ID starting with fe957e6254bb35a97b2646b9f40ed8ce3275eac990c2cf5cbe36d40cf881576b not found: ID does not exist" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.713382 4948 scope.go:117] "RemoveContainer" containerID="f0ef47cd26be5524b10f130d9e9cfedfb112377b7b8a573dbfba72f2aea7171a" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.734130 4948 scope.go:117] "RemoveContainer" containerID="53a8b49e55382ec2abb1895e783e1adaa87ee681120c9d9cc97ea037b324ce82" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.753254 4948 scope.go:117] "RemoveContainer" containerID="486e8ad3e3363189171a1fae498dac8cd5afab51cba85c8407f3f35da89cba9a" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.767545 4948 scope.go:117] "RemoveContainer" containerID="f0ef47cd26be5524b10f130d9e9cfedfb112377b7b8a573dbfba72f2aea7171a" Jan 22 14:25:18 crc kubenswrapper[4948]: E0122 14:25:18.768189 4948 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0ef47cd26be5524b10f130d9e9cfedfb112377b7b8a573dbfba72f2aea7171a\": container with ID starting with f0ef47cd26be5524b10f130d9e9cfedfb112377b7b8a573dbfba72f2aea7171a not found: ID does not exist" containerID="f0ef47cd26be5524b10f130d9e9cfedfb112377b7b8a573dbfba72f2aea7171a" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.768236 4948 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0ef47cd26be5524b10f130d9e9cfedfb112377b7b8a573dbfba72f2aea7171a"} err="failed to get container status \"f0ef47cd26be5524b10f130d9e9cfedfb112377b7b8a573dbfba72f2aea7171a\": rpc error: code = NotFound desc = could not find container \"f0ef47cd26be5524b10f130d9e9cfedfb112377b7b8a573dbfba72f2aea7171a\": container with ID starting with f0ef47cd26be5524b10f130d9e9cfedfb112377b7b8a573dbfba72f2aea7171a not found: ID does not exist" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.768263 4948 scope.go:117] "RemoveContainer" containerID="53a8b49e55382ec2abb1895e783e1adaa87ee681120c9d9cc97ea037b324ce82" Jan 22 14:25:18 crc kubenswrapper[4948]: E0122 14:25:18.768780 4948 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53a8b49e55382ec2abb1895e783e1adaa87ee681120c9d9cc97ea037b324ce82\": container with ID starting with 53a8b49e55382ec2abb1895e783e1adaa87ee681120c9d9cc97ea037b324ce82 not found: ID does not exist" containerID="53a8b49e55382ec2abb1895e783e1adaa87ee681120c9d9cc97ea037b324ce82" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.768818 4948 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53a8b49e55382ec2abb1895e783e1adaa87ee681120c9d9cc97ea037b324ce82"} err="failed to get container status \"53a8b49e55382ec2abb1895e783e1adaa87ee681120c9d9cc97ea037b324ce82\": rpc error: code = NotFound desc = could not find container \"53a8b49e55382ec2abb1895e783e1adaa87ee681120c9d9cc97ea037b324ce82\": container with ID starting with 53a8b49e55382ec2abb1895e783e1adaa87ee681120c9d9cc97ea037b324ce82 not found: ID does not exist" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.768844 4948 
scope.go:117] "RemoveContainer" containerID="486e8ad3e3363189171a1fae498dac8cd5afab51cba85c8407f3f35da89cba9a" Jan 22 14:25:18 crc kubenswrapper[4948]: E0122 14:25:18.769183 4948 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"486e8ad3e3363189171a1fae498dac8cd5afab51cba85c8407f3f35da89cba9a\": container with ID starting with 486e8ad3e3363189171a1fae498dac8cd5afab51cba85c8407f3f35da89cba9a not found: ID does not exist" containerID="486e8ad3e3363189171a1fae498dac8cd5afab51cba85c8407f3f35da89cba9a" Jan 22 14:25:18 crc kubenswrapper[4948]: I0122 14:25:18.769222 4948 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"486e8ad3e3363189171a1fae498dac8cd5afab51cba85c8407f3f35da89cba9a"} err="failed to get container status \"486e8ad3e3363189171a1fae498dac8cd5afab51cba85c8407f3f35da89cba9a\": rpc error: code = NotFound desc = could not find container \"486e8ad3e3363189171a1fae498dac8cd5afab51cba85c8407f3f35da89cba9a\": container with ID starting with 486e8ad3e3363189171a1fae498dac8cd5afab51cba85c8407f3f35da89cba9a not found: ID does not exist" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.208374 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mw78s"] Jan 22 14:25:19 crc kubenswrapper[4948]: E0122 14:25:19.209486 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b89355cf-e0e4-4a32-8519-c75e0a13c7ce" containerName="extract-content" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.209528 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="b89355cf-e0e4-4a32-8519-c75e0a13c7ce" containerName="extract-content" Jan 22 14:25:19 crc kubenswrapper[4948]: E0122 14:25:19.209558 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cc0468b-0cc6-43b6-b358-72c5cb9d6050" containerName="registry-server" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.209567 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cc0468b-0cc6-43b6-b358-72c5cb9d6050" containerName="registry-server" Jan 22 14:25:19 crc kubenswrapper[4948]: E0122 14:25:19.209578 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a4d2a63-38c9-4cd6-b02a-06c27f128cd0" containerName="registry-server" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.209593 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a4d2a63-38c9-4cd6-b02a-06c27f128cd0" containerName="registry-server" Jan 22 14:25:19 crc kubenswrapper[4948]: E0122 14:25:19.209605 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b89355cf-e0e4-4a32-8519-c75e0a13c7ce" containerName="registry-server" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.209616 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="b89355cf-e0e4-4a32-8519-c75e0a13c7ce" containerName="registry-server" Jan 22 14:25:19 crc kubenswrapper[4948]: E0122 14:25:19.209632 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce" containerName="registry-server" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.209643 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce" containerName="registry-server" Jan 22 14:25:19 crc kubenswrapper[4948]: E0122 14:25:19.209662 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cc0468b-0cc6-43b6-b358-72c5cb9d6050" containerName="extract-utilities" Jan 22 14:25:19 crc 
kubenswrapper[4948]: I0122 14:25:19.209671 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cc0468b-0cc6-43b6-b358-72c5cb9d6050" containerName="extract-utilities" Jan 22 14:25:19 crc kubenswrapper[4948]: E0122 14:25:19.209691 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a4d2a63-38c9-4cd6-b02a-06c27f128cd0" containerName="extract-utilities" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.209699 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a4d2a63-38c9-4cd6-b02a-06c27f128cd0" containerName="extract-utilities" Jan 22 14:25:19 crc kubenswrapper[4948]: E0122 14:25:19.209720 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce" containerName="extract-utilities" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.209730 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce" containerName="extract-utilities" Jan 22 14:25:19 crc kubenswrapper[4948]: E0122 14:25:19.209747 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8cc0468b-0cc6-43b6-b358-72c5cb9d6050" containerName="extract-content" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.209756 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="8cc0468b-0cc6-43b6-b358-72c5cb9d6050" containerName="extract-content" Jan 22 14:25:19 crc kubenswrapper[4948]: E0122 14:25:19.209772 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce" containerName="extract-content" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.209780 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce" containerName="extract-content" Jan 22 14:25:19 crc kubenswrapper[4948]: E0122 14:25:19.209789 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b89355cf-e0e4-4a32-8519-c75e0a13c7ce" containerName="extract-utilities" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.209798 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="b89355cf-e0e4-4a32-8519-c75e0a13c7ce" containerName="extract-utilities" Jan 22 14:25:19 crc kubenswrapper[4948]: E0122 14:25:19.209817 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5a4d2a63-38c9-4cd6-b02a-06c27f128cd0" containerName="extract-content" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.209825 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="5a4d2a63-38c9-4cd6-b02a-06c27f128cd0" containerName="extract-content" Jan 22 14:25:19 crc kubenswrapper[4948]: E0122 14:25:19.209842 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12fa60a3-783c-4015-b759-ab636fd6e559" containerName="marketplace-operator" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.209850 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="12fa60a3-783c-4015-b759-ab636fd6e559" containerName="marketplace-operator" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.210324 4948 memory_manager.go:354] "RemoveStaleState removing state" podUID="57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce" containerName="registry-server" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.210343 4948 memory_manager.go:354] "RemoveStaleState removing state" podUID="8cc0468b-0cc6-43b6-b358-72c5cb9d6050" containerName="registry-server" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.210351 4948 memory_manager.go:354] "RemoveStaleState removing state" podUID="b89355cf-e0e4-4a32-8519-c75e0a13c7ce" 
containerName="registry-server" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.210371 4948 memory_manager.go:354] "RemoveStaleState removing state" podUID="5a4d2a63-38c9-4cd6-b02a-06c27f128cd0" containerName="registry-server" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.210387 4948 memory_manager.go:354] "RemoveStaleState removing state" podUID="12fa60a3-783c-4015-b759-ab636fd6e559" containerName="marketplace-operator" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.210397 4948 memory_manager.go:354] "RemoveStaleState removing state" podUID="12fa60a3-783c-4015-b759-ab636fd6e559" containerName="marketplace-operator" Jan 22 14:25:19 crc kubenswrapper[4948]: E0122 14:25:19.210986 4948 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="12fa60a3-783c-4015-b759-ab636fd6e559" containerName="marketplace-operator" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.210999 4948 state_mem.go:107] "Deleted CPUSet assignment" podUID="12fa60a3-783c-4015-b759-ab636fd6e559" containerName="marketplace-operator" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.214531 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mw78s" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.219264 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.227858 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mw78s"] Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.336086 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b15ffaed-73eb-4387-bcd3-3cd99af52ea9-catalog-content\") pod \"redhat-marketplace-mw78s\" (UID: \"b15ffaed-73eb-4387-bcd3-3cd99af52ea9\") " pod="openshift-marketplace/redhat-marketplace-mw78s" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.336137 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gswrk\" (UniqueName: \"kubernetes.io/projected/b15ffaed-73eb-4387-bcd3-3cd99af52ea9-kube-api-access-gswrk\") pod \"redhat-marketplace-mw78s\" (UID: \"b15ffaed-73eb-4387-bcd3-3cd99af52ea9\") " pod="openshift-marketplace/redhat-marketplace-mw78s" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.336195 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b15ffaed-73eb-4387-bcd3-3cd99af52ea9-utilities\") pod \"redhat-marketplace-mw78s\" (UID: \"b15ffaed-73eb-4387-bcd3-3cd99af52ea9\") " pod="openshift-marketplace/redhat-marketplace-mw78s" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.437156 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b15ffaed-73eb-4387-bcd3-3cd99af52ea9-utilities\") pod \"redhat-marketplace-mw78s\" (UID: \"b15ffaed-73eb-4387-bcd3-3cd99af52ea9\") " pod="openshift-marketplace/redhat-marketplace-mw78s" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.437240 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b15ffaed-73eb-4387-bcd3-3cd99af52ea9-catalog-content\") pod \"redhat-marketplace-mw78s\" (UID: 
\"b15ffaed-73eb-4387-bcd3-3cd99af52ea9\") " pod="openshift-marketplace/redhat-marketplace-mw78s" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.437260 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gswrk\" (UniqueName: \"kubernetes.io/projected/b15ffaed-73eb-4387-bcd3-3cd99af52ea9-kube-api-access-gswrk\") pod \"redhat-marketplace-mw78s\" (UID: \"b15ffaed-73eb-4387-bcd3-3cd99af52ea9\") " pod="openshift-marketplace/redhat-marketplace-mw78s" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.437886 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b15ffaed-73eb-4387-bcd3-3cd99af52ea9-utilities\") pod \"redhat-marketplace-mw78s\" (UID: \"b15ffaed-73eb-4387-bcd3-3cd99af52ea9\") " pod="openshift-marketplace/redhat-marketplace-mw78s" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.438125 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b15ffaed-73eb-4387-bcd3-3cd99af52ea9-catalog-content\") pod \"redhat-marketplace-mw78s\" (UID: \"b15ffaed-73eb-4387-bcd3-3cd99af52ea9\") " pod="openshift-marketplace/redhat-marketplace-mw78s" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.471565 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gswrk\" (UniqueName: \"kubernetes.io/projected/b15ffaed-73eb-4387-bcd3-3cd99af52ea9-kube-api-access-gswrk\") pod \"redhat-marketplace-mw78s\" (UID: \"b15ffaed-73eb-4387-bcd3-3cd99af52ea9\") " pod="openshift-marketplace/redhat-marketplace-mw78s" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.487887 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-fw7tk" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.539937 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mw78s" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.967864 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="12fa60a3-783c-4015-b759-ab636fd6e559" path="/var/lib/kubelet/pods/12fa60a3-783c-4015-b759-ab636fd6e559/volumes" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.969148 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce" path="/var/lib/kubelet/pods/57f0f9c6-2db3-47ab-aa4d-3425f5efd7ce/volumes" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.969960 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5a4d2a63-38c9-4cd6-b02a-06c27f128cd0" path="/var/lib/kubelet/pods/5a4d2a63-38c9-4cd6-b02a-06c27f128cd0/volumes" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.971623 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cc0468b-0cc6-43b6-b358-72c5cb9d6050" path="/var/lib/kubelet/pods/8cc0468b-0cc6-43b6-b358-72c5cb9d6050/volumes" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.972913 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b89355cf-e0e4-4a32-8519-c75e0a13c7ce" path="/var/lib/kubelet/pods/b89355cf-e0e4-4a32-8519-c75e0a13c7ce/volumes" Jan 22 14:25:19 crc kubenswrapper[4948]: I0122 14:25:19.985167 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mw78s"] Jan 22 14:25:20 crc kubenswrapper[4948]: I0122 14:25:20.203601 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-bjzwk"] Jan 22 14:25:20 crc kubenswrapper[4948]: I0122 14:25:20.205154 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-bjzwk" Jan 22 14:25:20 crc kubenswrapper[4948]: I0122 14:25:20.209091 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Jan 22 14:25:20 crc kubenswrapper[4948]: I0122 14:25:20.226464 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bjzwk"] Jan 22 14:25:20 crc kubenswrapper[4948]: I0122 14:25:20.249140 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2574f4a-cbcb-4305-8cb9-18ab7729c6da-utilities\") pod \"redhat-operators-bjzwk\" (UID: \"a2574f4a-cbcb-4305-8cb9-18ab7729c6da\") " pod="openshift-marketplace/redhat-operators-bjzwk" Jan 22 14:25:20 crc kubenswrapper[4948]: I0122 14:25:20.249271 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qjxn\" (UniqueName: \"kubernetes.io/projected/a2574f4a-cbcb-4305-8cb9-18ab7729c6da-kube-api-access-7qjxn\") pod \"redhat-operators-bjzwk\" (UID: \"a2574f4a-cbcb-4305-8cb9-18ab7729c6da\") " pod="openshift-marketplace/redhat-operators-bjzwk" Jan 22 14:25:20 crc kubenswrapper[4948]: I0122 14:25:20.249342 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2574f4a-cbcb-4305-8cb9-18ab7729c6da-catalog-content\") pod \"redhat-operators-bjzwk\" (UID: \"a2574f4a-cbcb-4305-8cb9-18ab7729c6da\") " pod="openshift-marketplace/redhat-operators-bjzwk" Jan 22 14:25:20 crc kubenswrapper[4948]: I0122 14:25:20.350471 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2574f4a-cbcb-4305-8cb9-18ab7729c6da-utilities\") pod \"redhat-operators-bjzwk\" (UID: \"a2574f4a-cbcb-4305-8cb9-18ab7729c6da\") " pod="openshift-marketplace/redhat-operators-bjzwk" Jan 22 14:25:20 crc kubenswrapper[4948]: I0122 14:25:20.350527 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qjxn\" (UniqueName: \"kubernetes.io/projected/a2574f4a-cbcb-4305-8cb9-18ab7729c6da-kube-api-access-7qjxn\") pod \"redhat-operators-bjzwk\" (UID: \"a2574f4a-cbcb-4305-8cb9-18ab7729c6da\") " pod="openshift-marketplace/redhat-operators-bjzwk" Jan 22 14:25:20 crc kubenswrapper[4948]: I0122 14:25:20.350567 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2574f4a-cbcb-4305-8cb9-18ab7729c6da-catalog-content\") pod \"redhat-operators-bjzwk\" (UID: \"a2574f4a-cbcb-4305-8cb9-18ab7729c6da\") " pod="openshift-marketplace/redhat-operators-bjzwk" Jan 22 14:25:20 crc kubenswrapper[4948]: I0122 14:25:20.350918 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2574f4a-cbcb-4305-8cb9-18ab7729c6da-utilities\") pod \"redhat-operators-bjzwk\" (UID: \"a2574f4a-cbcb-4305-8cb9-18ab7729c6da\") " pod="openshift-marketplace/redhat-operators-bjzwk" Jan 22 14:25:20 crc kubenswrapper[4948]: I0122 14:25:20.350935 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2574f4a-cbcb-4305-8cb9-18ab7729c6da-catalog-content\") pod \"redhat-operators-bjzwk\" (UID: \"a2574f4a-cbcb-4305-8cb9-18ab7729c6da\") " 
pod="openshift-marketplace/redhat-operators-bjzwk" Jan 22 14:25:20 crc kubenswrapper[4948]: I0122 14:25:20.378114 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qjxn\" (UniqueName: \"kubernetes.io/projected/a2574f4a-cbcb-4305-8cb9-18ab7729c6da-kube-api-access-7qjxn\") pod \"redhat-operators-bjzwk\" (UID: \"a2574f4a-cbcb-4305-8cb9-18ab7729c6da\") " pod="openshift-marketplace/redhat-operators-bjzwk" Jan 22 14:25:20 crc kubenswrapper[4948]: I0122 14:25:20.492451 4948 generic.go:334] "Generic (PLEG): container finished" podID="b15ffaed-73eb-4387-bcd3-3cd99af52ea9" containerID="ec1f0b5e397d5716fee1845eed97afeef253f20f296f7830b9f7560c781b71f4" exitCode=0 Jan 22 14:25:20 crc kubenswrapper[4948]: I0122 14:25:20.492529 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mw78s" event={"ID":"b15ffaed-73eb-4387-bcd3-3cd99af52ea9","Type":"ContainerDied","Data":"ec1f0b5e397d5716fee1845eed97afeef253f20f296f7830b9f7560c781b71f4"} Jan 22 14:25:20 crc kubenswrapper[4948]: I0122 14:25:20.492800 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mw78s" event={"ID":"b15ffaed-73eb-4387-bcd3-3cd99af52ea9","Type":"ContainerStarted","Data":"dd01001a329e5a9f4d038f8ddbf78a4cc58c68fb0122646c8b16c9a0ca5a2965"} Jan 22 14:25:20 crc kubenswrapper[4948]: I0122 14:25:20.531769 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-bjzwk" Jan 22 14:25:20 crc kubenswrapper[4948]: I0122 14:25:20.969197 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-bjzwk"] Jan 22 14:25:20 crc kubenswrapper[4948]: W0122 14:25:20.971917 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda2574f4a_cbcb_4305_8cb9_18ab7729c6da.slice/crio-3b8697b1bc3ba336c8131e241f68b6fca9ffbb392d9af778d92a6f7a43be2202 WatchSource:0}: Error finding container 3b8697b1bc3ba336c8131e241f68b6fca9ffbb392d9af778d92a6f7a43be2202: Status 404 returned error can't find the container with id 3b8697b1bc3ba336c8131e241f68b6fca9ffbb392d9af778d92a6f7a43be2202 Jan 22 14:25:21 crc kubenswrapper[4948]: I0122 14:25:21.500773 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bjzwk" event={"ID":"a2574f4a-cbcb-4305-8cb9-18ab7729c6da","Type":"ContainerDied","Data":"df5d93363cf5dcae7276f89ecfa026f6a05fae3b5fd4181d8a87ca20708f726f"} Jan 22 14:25:21 crc kubenswrapper[4948]: I0122 14:25:21.500935 4948 generic.go:334] "Generic (PLEG): container finished" podID="a2574f4a-cbcb-4305-8cb9-18ab7729c6da" containerID="df5d93363cf5dcae7276f89ecfa026f6a05fae3b5fd4181d8a87ca20708f726f" exitCode=0 Jan 22 14:25:21 crc kubenswrapper[4948]: I0122 14:25:21.501226 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bjzwk" event={"ID":"a2574f4a-cbcb-4305-8cb9-18ab7729c6da","Type":"ContainerStarted","Data":"3b8697b1bc3ba336c8131e241f68b6fca9ffbb392d9af778d92a6f7a43be2202"} Jan 22 14:25:21 crc kubenswrapper[4948]: I0122 14:25:21.503684 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mw78s" event={"ID":"b15ffaed-73eb-4387-bcd3-3cd99af52ea9","Type":"ContainerStarted","Data":"83960f15ce850cc95d066a8093a5e79bab33e69a0c0b9fae2b8aa5f9e0d591b2"} Jan 22 14:25:21 crc kubenswrapper[4948]: I0122 14:25:21.608086 4948 kubelet.go:2421] 
"SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-tmvtm"] Jan 22 14:25:21 crc kubenswrapper[4948]: I0122 14:25:21.609287 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tmvtm" Jan 22 14:25:21 crc kubenswrapper[4948]: I0122 14:25:21.613557 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Jan 22 14:25:21 crc kubenswrapper[4948]: I0122 14:25:21.631954 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tmvtm"] Jan 22 14:25:21 crc kubenswrapper[4948]: I0122 14:25:21.775515 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c44a8d29-c9a7-491a-8fc9-2580a857f1b9-catalog-content\") pod \"certified-operators-tmvtm\" (UID: \"c44a8d29-c9a7-491a-8fc9-2580a857f1b9\") " pod="openshift-marketplace/certified-operators-tmvtm" Jan 22 14:25:21 crc kubenswrapper[4948]: I0122 14:25:21.775634 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c44a8d29-c9a7-491a-8fc9-2580a857f1b9-utilities\") pod \"certified-operators-tmvtm\" (UID: \"c44a8d29-c9a7-491a-8fc9-2580a857f1b9\") " pod="openshift-marketplace/certified-operators-tmvtm" Jan 22 14:25:21 crc kubenswrapper[4948]: I0122 14:25:21.775666 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86b5s\" (UniqueName: \"kubernetes.io/projected/c44a8d29-c9a7-491a-8fc9-2580a857f1b9-kube-api-access-86b5s\") pod \"certified-operators-tmvtm\" (UID: \"c44a8d29-c9a7-491a-8fc9-2580a857f1b9\") " pod="openshift-marketplace/certified-operators-tmvtm" Jan 22 14:25:21 crc kubenswrapper[4948]: I0122 14:25:21.876741 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c44a8d29-c9a7-491a-8fc9-2580a857f1b9-utilities\") pod \"certified-operators-tmvtm\" (UID: \"c44a8d29-c9a7-491a-8fc9-2580a857f1b9\") " pod="openshift-marketplace/certified-operators-tmvtm" Jan 22 14:25:21 crc kubenswrapper[4948]: I0122 14:25:21.877031 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86b5s\" (UniqueName: \"kubernetes.io/projected/c44a8d29-c9a7-491a-8fc9-2580a857f1b9-kube-api-access-86b5s\") pod \"certified-operators-tmvtm\" (UID: \"c44a8d29-c9a7-491a-8fc9-2580a857f1b9\") " pod="openshift-marketplace/certified-operators-tmvtm" Jan 22 14:25:21 crc kubenswrapper[4948]: I0122 14:25:21.877160 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c44a8d29-c9a7-491a-8fc9-2580a857f1b9-catalog-content\") pod \"certified-operators-tmvtm\" (UID: \"c44a8d29-c9a7-491a-8fc9-2580a857f1b9\") " pod="openshift-marketplace/certified-operators-tmvtm" Jan 22 14:25:21 crc kubenswrapper[4948]: I0122 14:25:21.877635 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c44a8d29-c9a7-491a-8fc9-2580a857f1b9-catalog-content\") pod \"certified-operators-tmvtm\" (UID: \"c44a8d29-c9a7-491a-8fc9-2580a857f1b9\") " pod="openshift-marketplace/certified-operators-tmvtm" Jan 22 14:25:21 crc kubenswrapper[4948]: I0122 14:25:21.877719 4948 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c44a8d29-c9a7-491a-8fc9-2580a857f1b9-utilities\") pod \"certified-operators-tmvtm\" (UID: \"c44a8d29-c9a7-491a-8fc9-2580a857f1b9\") " pod="openshift-marketplace/certified-operators-tmvtm" Jan 22 14:25:21 crc kubenswrapper[4948]: I0122 14:25:21.897513 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86b5s\" (UniqueName: \"kubernetes.io/projected/c44a8d29-c9a7-491a-8fc9-2580a857f1b9-kube-api-access-86b5s\") pod \"certified-operators-tmvtm\" (UID: \"c44a8d29-c9a7-491a-8fc9-2580a857f1b9\") " pod="openshift-marketplace/certified-operators-tmvtm" Jan 22 14:25:21 crc kubenswrapper[4948]: I0122 14:25:21.934391 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-tmvtm" Jan 22 14:25:22 crc kubenswrapper[4948]: I0122 14:25:22.144484 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-tmvtm"] Jan 22 14:25:22 crc kubenswrapper[4948]: I0122 14:25:22.511458 4948 generic.go:334] "Generic (PLEG): container finished" podID="b15ffaed-73eb-4387-bcd3-3cd99af52ea9" containerID="83960f15ce850cc95d066a8093a5e79bab33e69a0c0b9fae2b8aa5f9e0d591b2" exitCode=0 Jan 22 14:25:22 crc kubenswrapper[4948]: I0122 14:25:22.511540 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mw78s" event={"ID":"b15ffaed-73eb-4387-bcd3-3cd99af52ea9","Type":"ContainerDied","Data":"83960f15ce850cc95d066a8093a5e79bab33e69a0c0b9fae2b8aa5f9e0d591b2"} Jan 22 14:25:22 crc kubenswrapper[4948]: I0122 14:25:22.514967 4948 generic.go:334] "Generic (PLEG): container finished" podID="c44a8d29-c9a7-491a-8fc9-2580a857f1b9" containerID="8fa8ce95aff790d4c71d2c964bdea7b6cb7d7d17f23a333c0dd5da98786d113e" exitCode=0 Jan 22 14:25:22 crc kubenswrapper[4948]: I0122 14:25:22.515045 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tmvtm" event={"ID":"c44a8d29-c9a7-491a-8fc9-2580a857f1b9","Type":"ContainerDied","Data":"8fa8ce95aff790d4c71d2c964bdea7b6cb7d7d17f23a333c0dd5da98786d113e"} Jan 22 14:25:22 crc kubenswrapper[4948]: I0122 14:25:22.515098 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tmvtm" event={"ID":"c44a8d29-c9a7-491a-8fc9-2580a857f1b9","Type":"ContainerStarted","Data":"963aacb43f37e0e85f6f11f9b6ebe2ead2b6078626771df0732d21a9809827c3"} Jan 22 14:25:22 crc kubenswrapper[4948]: I0122 14:25:22.516413 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bjzwk" event={"ID":"a2574f4a-cbcb-4305-8cb9-18ab7729c6da","Type":"ContainerStarted","Data":"fdd18d1ad4a3f2d511660702b5923630bf1eda75f77478715ff209232e433689"} Jan 22 14:25:22 crc kubenswrapper[4948]: I0122 14:25:22.608273 4948 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-p55rs"] Jan 22 14:25:22 crc kubenswrapper[4948]: I0122 14:25:22.609576 4948 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-p55rs" Jan 22 14:25:22 crc kubenswrapper[4948]: I0122 14:25:22.612258 4948 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Jan 22 14:25:22 crc kubenswrapper[4948]: I0122 14:25:22.626804 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-p55rs"] Jan 22 14:25:22 crc kubenswrapper[4948]: I0122 14:25:22.689190 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jf9tk\" (UniqueName: \"kubernetes.io/projected/ca28f2d2-83e9-4616-8266-3ab852705aeb-kube-api-access-jf9tk\") pod \"community-operators-p55rs\" (UID: \"ca28f2d2-83e9-4616-8266-3ab852705aeb\") " pod="openshift-marketplace/community-operators-p55rs" Jan 22 14:25:22 crc kubenswrapper[4948]: I0122 14:25:22.689248 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca28f2d2-83e9-4616-8266-3ab852705aeb-utilities\") pod \"community-operators-p55rs\" (UID: \"ca28f2d2-83e9-4616-8266-3ab852705aeb\") " pod="openshift-marketplace/community-operators-p55rs" Jan 22 14:25:22 crc kubenswrapper[4948]: I0122 14:25:22.689279 4948 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca28f2d2-83e9-4616-8266-3ab852705aeb-catalog-content\") pod \"community-operators-p55rs\" (UID: \"ca28f2d2-83e9-4616-8266-3ab852705aeb\") " pod="openshift-marketplace/community-operators-p55rs" Jan 22 14:25:22 crc kubenswrapper[4948]: I0122 14:25:22.790942 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jf9tk\" (UniqueName: \"kubernetes.io/projected/ca28f2d2-83e9-4616-8266-3ab852705aeb-kube-api-access-jf9tk\") pod \"community-operators-p55rs\" (UID: \"ca28f2d2-83e9-4616-8266-3ab852705aeb\") " pod="openshift-marketplace/community-operators-p55rs" Jan 22 14:25:22 crc kubenswrapper[4948]: I0122 14:25:22.791266 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca28f2d2-83e9-4616-8266-3ab852705aeb-utilities\") pod \"community-operators-p55rs\" (UID: \"ca28f2d2-83e9-4616-8266-3ab852705aeb\") " pod="openshift-marketplace/community-operators-p55rs" Jan 22 14:25:22 crc kubenswrapper[4948]: I0122 14:25:22.791388 4948 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca28f2d2-83e9-4616-8266-3ab852705aeb-catalog-content\") pod \"community-operators-p55rs\" (UID: \"ca28f2d2-83e9-4616-8266-3ab852705aeb\") " pod="openshift-marketplace/community-operators-p55rs" Jan 22 14:25:22 crc kubenswrapper[4948]: I0122 14:25:22.791773 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ca28f2d2-83e9-4616-8266-3ab852705aeb-utilities\") pod \"community-operators-p55rs\" (UID: \"ca28f2d2-83e9-4616-8266-3ab852705aeb\") " pod="openshift-marketplace/community-operators-p55rs" Jan 22 14:25:22 crc kubenswrapper[4948]: I0122 14:25:22.791773 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ca28f2d2-83e9-4616-8266-3ab852705aeb-catalog-content\") pod \"community-operators-p55rs\" (UID: 
\"ca28f2d2-83e9-4616-8266-3ab852705aeb\") " pod="openshift-marketplace/community-operators-p55rs" Jan 22 14:25:22 crc kubenswrapper[4948]: I0122 14:25:22.809450 4948 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jf9tk\" (UniqueName: \"kubernetes.io/projected/ca28f2d2-83e9-4616-8266-3ab852705aeb-kube-api-access-jf9tk\") pod \"community-operators-p55rs\" (UID: \"ca28f2d2-83e9-4616-8266-3ab852705aeb\") " pod="openshift-marketplace/community-operators-p55rs" Jan 22 14:25:22 crc kubenswrapper[4948]: I0122 14:25:22.937303 4948 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-p55rs" Jan 22 14:25:23 crc kubenswrapper[4948]: I0122 14:25:23.202601 4948 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-p55rs"] Jan 22 14:25:23 crc kubenswrapper[4948]: W0122 14:25:23.206942 4948 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podca28f2d2_83e9_4616_8266_3ab852705aeb.slice/crio-f09d046d8ad95f65fb32f2b4189cdd1a747a0ca21975bf2e5fda683415d4275c WatchSource:0}: Error finding container f09d046d8ad95f65fb32f2b4189cdd1a747a0ca21975bf2e5fda683415d4275c: Status 404 returned error can't find the container with id f09d046d8ad95f65fb32f2b4189cdd1a747a0ca21975bf2e5fda683415d4275c Jan 22 14:25:23 crc kubenswrapper[4948]: I0122 14:25:23.523631 4948 generic.go:334] "Generic (PLEG): container finished" podID="a2574f4a-cbcb-4305-8cb9-18ab7729c6da" containerID="fdd18d1ad4a3f2d511660702b5923630bf1eda75f77478715ff209232e433689" exitCode=0 Jan 22 14:25:23 crc kubenswrapper[4948]: I0122 14:25:23.523714 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bjzwk" event={"ID":"a2574f4a-cbcb-4305-8cb9-18ab7729c6da","Type":"ContainerDied","Data":"fdd18d1ad4a3f2d511660702b5923630bf1eda75f77478715ff209232e433689"} Jan 22 14:25:23 crc kubenswrapper[4948]: I0122 14:25:23.527341 4948 generic.go:334] "Generic (PLEG): container finished" podID="ca28f2d2-83e9-4616-8266-3ab852705aeb" containerID="60ae06d2e4d7202b7a0d3508191444fc8eeec05399a024648b82352281230048" exitCode=0 Jan 22 14:25:23 crc kubenswrapper[4948]: I0122 14:25:23.527384 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p55rs" event={"ID":"ca28f2d2-83e9-4616-8266-3ab852705aeb","Type":"ContainerDied","Data":"60ae06d2e4d7202b7a0d3508191444fc8eeec05399a024648b82352281230048"} Jan 22 14:25:23 crc kubenswrapper[4948]: I0122 14:25:23.527402 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p55rs" event={"ID":"ca28f2d2-83e9-4616-8266-3ab852705aeb","Type":"ContainerStarted","Data":"f09d046d8ad95f65fb32f2b4189cdd1a747a0ca21975bf2e5fda683415d4275c"} Jan 22 14:25:23 crc kubenswrapper[4948]: I0122 14:25:23.531639 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mw78s" event={"ID":"b15ffaed-73eb-4387-bcd3-3cd99af52ea9","Type":"ContainerStarted","Data":"a5e364e541508228be524c357cdbcb94b26130a857fc46ab760f6b3c8aba4aa5"} Jan 22 14:25:23 crc kubenswrapper[4948]: I0122 14:25:23.536703 4948 generic.go:334] "Generic (PLEG): container finished" podID="c44a8d29-c9a7-491a-8fc9-2580a857f1b9" containerID="c0598c3f894800d8af4e87d0c0746c2094578282aa56974ea9967d76f78274c3" exitCode=0 Jan 22 14:25:23 crc kubenswrapper[4948]: I0122 14:25:23.536743 4948 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/certified-operators-tmvtm" event={"ID":"c44a8d29-c9a7-491a-8fc9-2580a857f1b9","Type":"ContainerDied","Data":"c0598c3f894800d8af4e87d0c0746c2094578282aa56974ea9967d76f78274c3"} Jan 22 14:25:23 crc kubenswrapper[4948]: I0122 14:25:23.560295 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mw78s" podStartSLOduration=2.066530232 podStartE2EDuration="4.560279474s" podCreationTimestamp="2026-01-22 14:25:19 +0000 UTC" firstStartedPulling="2026-01-22 14:25:20.494898202 +0000 UTC m=+378.824571131" lastFinishedPulling="2026-01-22 14:25:22.988647444 +0000 UTC m=+381.318320373" observedRunningTime="2026-01-22 14:25:23.55690146 +0000 UTC m=+381.886574389" watchObservedRunningTime="2026-01-22 14:25:23.560279474 +0000 UTC m=+381.889952403" Jan 22 14:25:24 crc kubenswrapper[4948]: I0122 14:25:24.544003 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-bjzwk" event={"ID":"a2574f4a-cbcb-4305-8cb9-18ab7729c6da","Type":"ContainerStarted","Data":"426ea4b08a895a4ec69996ed736a517dbd5061ecd2b86952c98be79dedde655d"} Jan 22 14:25:24 crc kubenswrapper[4948]: I0122 14:25:24.561644 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-bjzwk" podStartSLOduration=2.107855749 podStartE2EDuration="4.561627894s" podCreationTimestamp="2026-01-22 14:25:20 +0000 UTC" firstStartedPulling="2026-01-22 14:25:21.505170942 +0000 UTC m=+379.834843871" lastFinishedPulling="2026-01-22 14:25:23.958943087 +0000 UTC m=+382.288616016" observedRunningTime="2026-01-22 14:25:24.561319826 +0000 UTC m=+382.890992765" watchObservedRunningTime="2026-01-22 14:25:24.561627894 +0000 UTC m=+382.891300823" Jan 22 14:25:25 crc kubenswrapper[4948]: I0122 14:25:25.551147 4948 generic.go:334] "Generic (PLEG): container finished" podID="ca28f2d2-83e9-4616-8266-3ab852705aeb" containerID="e85f4aa2640eb2d804518277c119f19e6309da509f5aa3644dff3da6a0fa76b4" exitCode=0 Jan 22 14:25:25 crc kubenswrapper[4948]: I0122 14:25:25.551218 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p55rs" event={"ID":"ca28f2d2-83e9-4616-8266-3ab852705aeb","Type":"ContainerDied","Data":"e85f4aa2640eb2d804518277c119f19e6309da509f5aa3644dff3da6a0fa76b4"} Jan 22 14:25:25 crc kubenswrapper[4948]: I0122 14:25:25.557777 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-tmvtm" event={"ID":"c44a8d29-c9a7-491a-8fc9-2580a857f1b9","Type":"ContainerStarted","Data":"d47368a5af14f2f1e021220556936085f5e61715cef28d1178b00fc5dba1a1d2"} Jan 22 14:25:25 crc kubenswrapper[4948]: I0122 14:25:25.588075 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-tmvtm" podStartSLOduration=2.195057374 podStartE2EDuration="4.588057714s" podCreationTimestamp="2026-01-22 14:25:21 +0000 UTC" firstStartedPulling="2026-01-22 14:25:22.517156729 +0000 UTC m=+380.846829658" lastFinishedPulling="2026-01-22 14:25:24.910157079 +0000 UTC m=+383.239829998" observedRunningTime="2026-01-22 14:25:25.58576679 +0000 UTC m=+383.915439729" watchObservedRunningTime="2026-01-22 14:25:25.588057714 +0000 UTC m=+383.917730643" Jan 22 14:25:26 crc kubenswrapper[4948]: I0122 14:25:26.566070 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-p55rs" 
event={"ID":"ca28f2d2-83e9-4616-8266-3ab852705aeb","Type":"ContainerStarted","Data":"0e551856117d21fa764f986e7aed8d2efcc92f8f07487eda5325e4702fa829e9"} Jan 22 14:25:26 crc kubenswrapper[4948]: I0122 14:25:26.588949 4948 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-p55rs" podStartSLOduration=1.996855885 podStartE2EDuration="4.588926241s" podCreationTimestamp="2026-01-22 14:25:22 +0000 UTC" firstStartedPulling="2026-01-22 14:25:23.533060934 +0000 UTC m=+381.862733863" lastFinishedPulling="2026-01-22 14:25:26.12513129 +0000 UTC m=+384.454804219" observedRunningTime="2026-01-22 14:25:26.584633821 +0000 UTC m=+384.914306750" watchObservedRunningTime="2026-01-22 14:25:26.588926241 +0000 UTC m=+384.918599180" Jan 22 14:25:29 crc kubenswrapper[4948]: I0122 14:25:29.540981 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mw78s" Jan 22 14:25:29 crc kubenswrapper[4948]: I0122 14:25:29.541536 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mw78s" Jan 22 14:25:29 crc kubenswrapper[4948]: I0122 14:25:29.585651 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mw78s" Jan 22 14:25:29 crc kubenswrapper[4948]: I0122 14:25:29.627785 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mw78s" Jan 22 14:25:30 crc kubenswrapper[4948]: I0122 14:25:30.532839 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-bjzwk" Jan 22 14:25:30 crc kubenswrapper[4948]: I0122 14:25:30.532893 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-bjzwk" Jan 22 14:25:30 crc kubenswrapper[4948]: I0122 14:25:30.572371 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-bjzwk" Jan 22 14:25:30 crc kubenswrapper[4948]: I0122 14:25:30.633416 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-bjzwk" Jan 22 14:25:31 crc kubenswrapper[4948]: I0122 14:25:31.935630 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-tmvtm" Jan 22 14:25:31 crc kubenswrapper[4948]: I0122 14:25:31.936196 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-tmvtm" Jan 22 14:25:31 crc kubenswrapper[4948]: I0122 14:25:31.980175 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-tmvtm" Jan 22 14:25:32 crc kubenswrapper[4948]: I0122 14:25:32.645448 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-tmvtm" Jan 22 14:25:32 crc kubenswrapper[4948]: I0122 14:25:32.937941 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-p55rs" Jan 22 14:25:32 crc kubenswrapper[4948]: I0122 14:25:32.938846 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-p55rs" Jan 22 14:25:32 crc kubenswrapper[4948]: I0122 14:25:32.989226 4948 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openshift-marketplace/community-operators-p55rs" Jan 22 14:25:33 crc kubenswrapper[4948]: I0122 14:25:33.249587 4948 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" podUID="05ff4ada-a73e-4df8-8ceb-fa133b52fa6e" containerName="registry" containerID="cri-o://92ceb89c260397e716d48dc32b706fb76b78bf2bc6b694da0c621261024986dd" gracePeriod=30 Jan 22 14:25:33 crc kubenswrapper[4948]: I0122 14:25:33.650599 4948 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-p55rs" Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.108718 4948 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.201590 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-trusted-ca\") pod \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.201768 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.201813 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-registry-tls\") pod \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.201842 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-registry-certificates\") pod \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.201883 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-ca-trust-extracted\") pod \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.201912 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-installation-pull-secrets\") pod \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.201933 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qbnp8\" (UniqueName: \"kubernetes.io/projected/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-kube-api-access-qbnp8\") pod \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.201954 4948 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" 
(UniqueName: \"kubernetes.io/projected/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-bound-sa-token\") pod \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\" (UID: \"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e\") " Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.204047 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.205123 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.209388 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.209588 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.211289 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.211526 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-kube-api-access-qbnp8" (OuterVolumeSpecName: "kube-api-access-qbnp8") pod "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e"). InnerVolumeSpecName "kube-api-access-qbnp8". PluginName "kubernetes.io/projected", VolumeGidValue "" Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.216744 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.221685 4948 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e" (UID: "05ff4ada-a73e-4df8-8ceb-fa133b52fa6e"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.303102 4948 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-trusted-ca\") on node \"crc\" DevicePath \"\"" Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.303146 4948 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-registry-tls\") on node \"crc\" DevicePath \"\"" Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.303159 4948 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-registry-certificates\") on node \"crc\" DevicePath \"\"" Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.303174 4948 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.303185 4948 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.303196 4948 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qbnp8\" (UniqueName: \"kubernetes.io/projected/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-kube-api-access-qbnp8\") on node \"crc\" DevicePath \"\"" Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.303206 4948 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e-bound-sa-token\") on node \"crc\" DevicePath \"\"" Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.613092 4948 generic.go:334] "Generic (PLEG): container finished" podID="05ff4ada-a73e-4df8-8ceb-fa133b52fa6e" containerID="92ceb89c260397e716d48dc32b706fb76b78bf2bc6b694da0c621261024986dd" exitCode=0 Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.613488 4948 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.613578 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" event={"ID":"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e","Type":"ContainerDied","Data":"92ceb89c260397e716d48dc32b706fb76b78bf2bc6b694da0c621261024986dd"} Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.613635 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-5mc8q" event={"ID":"05ff4ada-a73e-4df8-8ceb-fa133b52fa6e","Type":"ContainerDied","Data":"4eea9a01aabdb32df16923f3bf1383567a5c5a05c97df009b46b2e609992a6e6"} Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.613661 4948 scope.go:117] "RemoveContainer" containerID="92ceb89c260397e716d48dc32b706fb76b78bf2bc6b694da0c621261024986dd" Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.635001 4948 scope.go:117] "RemoveContainer" containerID="92ceb89c260397e716d48dc32b706fb76b78bf2bc6b694da0c621261024986dd" Jan 22 14:25:34 crc kubenswrapper[4948]: E0122 14:25:34.635842 4948 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92ceb89c260397e716d48dc32b706fb76b78bf2bc6b694da0c621261024986dd\": container with ID starting with 92ceb89c260397e716d48dc32b706fb76b78bf2bc6b694da0c621261024986dd not found: ID does not exist" containerID="92ceb89c260397e716d48dc32b706fb76b78bf2bc6b694da0c621261024986dd" Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.635942 4948 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92ceb89c260397e716d48dc32b706fb76b78bf2bc6b694da0c621261024986dd"} err="failed to get container status \"92ceb89c260397e716d48dc32b706fb76b78bf2bc6b694da0c621261024986dd\": rpc error: code = NotFound desc = could not find container \"92ceb89c260397e716d48dc32b706fb76b78bf2bc6b694da0c621261024986dd\": container with ID starting with 92ceb89c260397e716d48dc32b706fb76b78bf2bc6b694da0c621261024986dd not found: ID does not exist" Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.655040 4948 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-5mc8q"] Jan 22 14:25:34 crc kubenswrapper[4948]: I0122 14:25:34.660790 4948 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-5mc8q"] Jan 22 14:25:35 crc kubenswrapper[4948]: I0122 14:25:35.963910 4948 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05ff4ada-a73e-4df8-8ceb-fa133b52fa6e" path="/var/lib/kubelet/pods/05ff4ada-a73e-4df8-8ceb-fa133b52fa6e/volumes" Jan 22 14:25:41 crc kubenswrapper[4948]: I0122 14:25:41.414268 4948 patch_prober.go:28] interesting pod/machine-config-daemon-ldzzt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:25:41 crc kubenswrapper[4948]: I0122 14:25:41.414926 4948 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" podUID="355ee4ab-8609-4b0c-ad77-bff335632d2d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:26:11 crc 
Jan 22 14:26:11 crc kubenswrapper[4948]: I0122 14:26:11.416221 4948 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" podUID="355ee4ab-8609-4b0c-ad77-bff335632d2d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Jan 22 14:26:11 crc kubenswrapper[4948]: I0122 14:26:11.416301 4948 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt"
Jan 22 14:26:11 crc kubenswrapper[4948]: I0122 14:26:11.417072 4948 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"836de0e5942dea29937e5a32fc50b884228d1d0096c1c8709d05b93b6a8d1e77"} pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Jan 22 14:26:11 crc kubenswrapper[4948]: I0122 14:26:11.417167 4948 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" podUID="355ee4ab-8609-4b0c-ad77-bff335632d2d" containerName="machine-config-daemon" containerID="cri-o://836de0e5942dea29937e5a32fc50b884228d1d0096c1c8709d05b93b6a8d1e77" gracePeriod=600
Jan 22 14:26:11 crc kubenswrapper[4948]: I0122 14:26:11.848560 4948 generic.go:334] "Generic (PLEG): container finished" podID="355ee4ab-8609-4b0c-ad77-bff335632d2d" containerID="836de0e5942dea29937e5a32fc50b884228d1d0096c1c8709d05b93b6a8d1e77" exitCode=0
Jan 22 14:26:11 crc kubenswrapper[4948]: I0122 14:26:11.848664 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" event={"ID":"355ee4ab-8609-4b0c-ad77-bff335632d2d","Type":"ContainerDied","Data":"836de0e5942dea29937e5a32fc50b884228d1d0096c1c8709d05b93b6a8d1e77"}
Jan 22 14:26:11 crc kubenswrapper[4948]: I0122 14:26:11.849101 4948 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" event={"ID":"355ee4ab-8609-4b0c-ad77-bff335632d2d","Type":"ContainerStarted","Data":"068fa34581cd8fb8822a36d4347a2bbec6dc1e7285ba98cba90f74851b242fb8"}
Jan 22 14:26:11 crc kubenswrapper[4948]: I0122 14:26:11.849135 4948 scope.go:117] "RemoveContainer" containerID="a0792e3bb0f666cc40a40d9ab7e377e7812cf0668b20c9883da146cce30d29c1"
Jan 22 14:28:11 crc kubenswrapper[4948]: I0122 14:28:11.415972 4948 patch_prober.go:28] interesting pod/machine-config-daemon-ldzzt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Jan 22 14:28:11 crc kubenswrapper[4948]: I0122 14:28:11.416846 4948 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" podUID="355ee4ab-8609-4b0c-ad77-bff335632d2d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
\"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Jan 22 14:28:41 crc kubenswrapper[4948]: I0122 14:28:41.414797 4948 patch_prober.go:28] interesting pod/machine-config-daemon-ldzzt container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Jan 22 14:28:41 crc kubenswrapper[4948]: I0122 14:28:41.415462 4948 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-ldzzt" podUID="355ee4ab-8609-4b0c-ad77-bff335632d2d" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515134432047024450 0ustar coreroot  Om77'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015134432050017357 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015134430406016505 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015134430407015456 5ustar corecore